Repository: johntruckenbrodt/pyroSAR
Branch: main
Commit: 293c2dc4b40f
Files: 112
Total size: 1.3 MB
Directory structure:
gitextract_9rn0un2a/
├── .github/
│ ├── ISSUE_TEMPLATE/
│ │ └── bug_report.md
│ └── workflows/
│ ├── conda-install.yml
│ └── python-publish.yml
├── .gitignore
├── .travis.yml
├── CONTRIBUTING.md
├── LICENSE.txt
├── MANIFEST.in
├── README.md
├── appveyor.yml
├── datacube_prepare.ipynb
├── docs/
│ ├── Makefile
│ ├── make.bat
│ └── source/
│ ├── about/
│ │ ├── changelog.rst
│ │ ├── projects.rst
│ │ ├── publications.rst
│ │ └── references.rst
│ ├── api/
│ │ ├── ancillary.rst
│ │ ├── archive.rst
│ │ ├── auxdata.rst
│ │ ├── datacube.rst
│ │ ├── drivers.rst
│ │ ├── examine.rst
│ │ ├── figures/
│ │ │ ├── snap_geocode.graphml
│ │ │ └── workflow_readme.txt
│ │ ├── gamma/
│ │ │ ├── api.rst
│ │ │ ├── auxil.rst
│ │ │ ├── dem.rst
│ │ │ ├── error.rst
│ │ │ ├── figures/
│ │ │ │ └── gamma_geocode.graphml
│ │ │ ├── index.rst
│ │ │ └── util.rst
│ │ ├── sentinel-1.rst
│ │ └── snap.rst
│ ├── conf.py
│ ├── general/
│ │ ├── DEM.rst
│ │ ├── OSV.rst
│ │ ├── configuration.rst
│ │ ├── filenaming.rst
│ │ ├── installation.rst
│ │ ├── logging.rst
│ │ ├── processing.rst
│ │ └── snap.rst
│ ├── index.rst
│ └── references.bib
├── environment-dev.yml
├── environment-doc.yml
├── environment.yml
├── pyproject.toml
├── pyroSAR/
│ ├── ERS/
│ │ ├── __init__.py
│ │ ├── auxil.py
│ │ └── mapping.py
│ ├── S1/
│ │ ├── __init__.py
│ │ ├── auxil.py
│ │ ├── linesimplify.py
│ │ └── polysimplify.py
│ ├── __init__.py
│ ├── ancillary.py
│ ├── archive.py
│ ├── auxdata.py
│ ├── config.py
│ ├── datacube_util.py
│ ├── drivers.py
│ ├── examine.py
│ ├── gamma/
│ │ ├── __init__.py
│ │ ├── api.py
│ │ ├── auxil.py
│ │ ├── dem.py
│ │ ├── error.py
│ │ ├── parser.py
│ │ ├── parser_demo.py
│ │ └── util.py
│ ├── install/
│ │ ├── download_egm96_15.gtx.sh
│ │ ├── download_testdata.sh
│ │ └── install_deps.sh
│ ├── patterns.py
│ ├── snap/
│ │ ├── __init__.py
│ │ ├── auxil.py
│ │ ├── data/
│ │ │ ├── collect_suffices.py
│ │ │ ├── snap.auxdata.properties
│ │ │ └── snap.suffices.properties
│ │ ├── recipes/
│ │ │ ├── base.xml
│ │ │ └── blank.xml
│ │ └── util.py
│ └── xml_util.py
├── readthedocs.yml
├── requirements-dev.txt
├── requirements.txt
└── tests/
├── conftest.py
├── data/
│ ├── ASA_IMS_1PNESA20040703_205338_000000182028_00172_12250_00001672562030318361237.N1
│ ├── S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif
│ ├── S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif.aux.xml
│ ├── SAR_IMP_1PXESA19960808_205906_00000017G158_00458_26498_2615.E1
│ ├── archive_outdated.csv
│ ├── dem.par
│ └── mli.par
├── installtest_gdal_geos.py
├── installtest_ogr_sqlite.py
├── installtest_spatialite.py
├── test_ancillary.py
├── test_archive.py
├── test_auxdata.py
├── test_config.py
├── test_drivers.py
├── test_examine.py
├── test_gamma.py
├── test_gamma_args.py
├── test_license.py
├── test_osv.py
├── test_snap.py
├── test_snap_exe.py
└── test_xml_util.py
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
- which operating system are you using?
e.g. Windows 10, Ubuntu 18.04, etc.
- which environment is pyroSAR running in?
e.g. system-wide Python installation, Anaconda environment, virtual environment, etc.
- which version of pyroSAR are you using?
one installed via conda, pip or a clone of the GitHub repository?
- which function of pyroSAR did you call with which parameters?
- if applicable, which version of SNAP or GAMMA are you using in pyroSAR?
- the full error message
================================================
FILE: .github/workflows/conda-install.yml
================================================
name: conda build
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
types: [ opened, reopened, synchronize ]
workflow_dispatch:
inputs:
debug_enabled:
type: boolean
description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
required: false
default: false
permissions:
contents: read
env:
SNAP_VERSION: "13.0"
jobs:
build-linux:
runs-on: ubuntu-latest
defaults:
run:
shell: micromamba-shell {0}
services:
postgres:
image: postgis/postgis:16-3.4
env:
POSTGRES_PASSWORD: Password12!
ports:
- 5432:5432
steps:
- uses: actions/checkout@v3
- name: Set up python environment
uses: mamba-org/setup-micromamba@v2
with:
environment-file: environment-dev.yml
cache-environment: true
init-shell: bash
generate-run-shell: true
post-cleanup: 'all'
- name: Install ESA SNAP
run: |
wget -nv https://download.esa.int/step/snap/$SNAP_VERSION/installers/esa-snap_all_linux-$SNAP_VERSION.0.sh
bash esa-snap_all_linux-$SNAP_VERSION.0.sh -q -dir $GITHUB_ACTION_PATH/esa-snap
- name: Set paths and variables
run: |
echo "$CONDA/bin" >> $GITHUB_PATH
echo "$GITHUB_ACTION_PATH/esa-snap/bin" >> $GITHUB_PATH
echo "PROJ_DATA=$CONDA/share/proj" >> $GITHUB_ENV
- name: Lint with flake8
run: |
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Install pyroSAR
run: |
pip install .
- name: Test with pytest
run: |
coverage run -m pytest
coverage xml
env:
PGUSER: postgres
PGPASSWORD: Password12!
- name: Publish to coveralls.io
if: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
uses: coverallsapp/github-action@v2.3.0
with:
github-token: ${{ github.token }}
format: cobertura
build-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v6
- name: Set up micromamba environment
uses: mamba-org/setup-micromamba@v2
with:
environment-file: environment-dev.yml
cache-environment: true
init-shell: bash
generate-run-shell: true
post-cleanup: 'all'
- uses: nyurik/action-setup-postgis@v2.2
with:
cached-dir: C:\downloads
postgres-version: 17
# ---------------- SNAP cache ----------------
- name: Cache SNAP zip
id: cache-snap
uses: actions/cache@v4
with:
path: |
snap.zip
key: snap-${{ env.SNAP_VERSION }}-windows
- name: Download and install SNAP (cache miss)
if: steps.cache-snap.outputs.cache-hit != 'true'
shell: cmd
run: |
echo Downloading SNAP installer...
curl -L -o snap.exe https://download.esa.int/step/snap/%SNAP_VERSION%/installers/esa-snap_all_windows-%SNAP_VERSION%.0.exe
echo Installing SNAP...
start /wait snap.exe -q -dir C:\esa-snap
echo Creating zip archive for cache...
powershell Compress-Archive -Path C:\esa-snap -DestinationPath snap.zip
- name: Restore SNAP from zip (cache hit)
if: steps.cache-snap.outputs.cache-hit == 'true'
shell: powershell
run: |
Write-Host "Unzipping cached SNAP..."
Expand-Archive snap.zip C:\
- name: Add SNAP to PATH
shell: powershell
run: |
echo "C:\esa-snap\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
# ---------------- Python steps (micromamba env) ----------------
- name: Verify micromamba python
shell: bash -el {0}
run: |
where python
python -V
where pip
- name: Install pyroSAR
shell: bash -el {0}
run: |
python -m pip install .
- name: Setup tmate session (debug)
uses: mxschmitt/action-tmate@v3
if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}
- name: Test with pytest
shell: bash -el {0}
run: |
pytest -vv
env:
PGUSER: postgres
PGPASSWORD: postgres
================================================
FILE: .github/workflows/python-publish.yml
================================================
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
name: Upload Python Package
on:
release:
types: [ published ]
permissions:
contents: read
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
================================================
FILE: .gitignore
================================================
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
.idea/
out/
dev_*
# OSX tempfiles
.DS_Store
================================================
FILE: .travis.yml
================================================
dist: jammy
language: python
sudo: required
cache:
directories:
- ~/.cache/pip
env:
global:
- PIP_WHEEL_DIR=$HOME/.cache/pip/wheels
- PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels
- TESTDATA_DIR=$HOME/testdata
- PGUSER=travis
- PGPASSWORD=Password12!
- SNAP_VERSION=10
addons:
postgresql: '14'
apt:
sources:
- sourceline: 'ppa:ubuntugis/ppa'
packages:
- libgdal-dev
- gdal-bin
- libsqlite3-mod-spatialite
- libproj-dev
- python3-dev
- postgresql-14-postgis-3
services:
- postgresql
python:
- '3.10'
before_install:
- export SNAP_INSTALLER=esa-snap_sentinel_linux-"$SNAP_VERSION".0.0.sh
- wget -O $SNAP_INSTALLER https://download.esa.int/step/snap/"$SNAP_VERSION"_0/installers/"$SNAP_INSTALLER"
- bash $SNAP_INSTALLER -q
- export PATH=$PATH:/opt/snap/bin
install:
- mkdir -p ~/.cache/pip/wheels # remove warning "Url 'file:///home/travis/.cache/pip/wheels' is ignored: it is neither a file nor a directory."
- pip install --ignore-installed setuptools pip six certifi # install packages inside the venv if the system version is too old
- pip install numpy
- pip install GDAL==$(gdal-config --version) --global-option=build_ext --global-option="$(gdal-config --cflags)"
- pip install coveralls coverage
- pip install .[test]
#before_script:
# - travis_wait 40 . ./pyroSAR/install/download_testdata.sh
before_script:
- psql -U $PGUSER -c 'create database travis_ci_test'
- psql -U $PGUSER -c "create extension if not exists postgis"
- psql -U $PGUSER -c "alter user ${PGUSER} password '${PGPASSWORD}'"
script:
- coverage run -m pytest
after_success:
- coveralls
================================================
FILE: CONTRIBUTING.md
================================================
# Contributing to pyroSAR
First off, thanks for considering a contribution to pyroSAR. Any contribution, may it be a feature suggestion, a pull
request or a simple bug report, is valuable to the project and very welcome.
This document is intended as a guideline on best practices.
## How to open an issue
The easiest way to contribute to pyroSAR is by opening an issue. This is intended for reporting software bugs and
suggesting new features. Before you do, please read through the list of
[open issues](https://github.com/johntruckenbrodt/pyroSAR/issues) to see whether this issue has already been raised.
This way, duplicates can be reduced and it is easier for the developers to address them.
If you are not sure whether your issue is a duplicate of an existing one, just open a new issue. It is easier to link
two existing similar issues than separating two different ones contained in one.
For reporting bugs please fill out the template, which is available once you open it. For suggesting new features you
can just delete the template text.
The following questions need to be answered so that it is possible for the developers to start fixing the software:
- which operating system are you using?
e.g. Windows 10, Ubuntu 18.04, etc.
- which environment is pyroSAR running in?
e.g. system-wide Python installation, Anaconda environment, virtual environment, etc.
- which version of pyroSAR are you using?
one installed via pip or a clone of the GitHub repository?
- which function of pyroSAR did you call with which parameters?
- if applicable, which version of SNAP or GAMMA are you using in pyroSAR?
- the full error message
This way the error is reproducible and can quickly be fixed.
## Checking pyroSAR's version
The used version can be obtained like this:
```python
import pyroSAR
print(pyroSAR.__version__)
```
Depending on how you installed pyroSAR, the version might look different.
If installed via pip with `pip install pyroSAR`, the package is downloaded from
[PyPI](https://pypi.org/project/pyroSAR/),
where only the main releases are stored and versions are named e.g. `0.9.1`.
These can also be found on GitHub [here](https://github.com/johntruckenbrodt/pyroSAR/releases).
If you have installed pyroSAR directly from GitHub like so:
```shell script
python3 -m pip install git+https://github.com/johntruckenbrodt/pyroSAR
```
or have directly cloned a branch from GitHub, your version might look like this:
`0.9.2.dev103+g57eeb30`, in which this naming pattern is used:
`{next_version}.dev{distance}+{scm letter}{revision hash}`.
In this case we can see that git is used as scm and the latest commit of the software was
[57eeb30](https://github.com/johntruckenbrodt/pyroSAR/commit/57eeb30970dc6adfee62ca12fd8c8818ecaf3a14),
which, at the time of checking the version, had a distance of 103 commits to the latest commit.
See [here](https://www.diycode.cc/projects/pypa/setuptools_scm) for more details.
================================================
FILE: LICENSE.txt
================================================
# Copyright (c) 2014-2026, the pyroSAR Developers.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
================================================
FILE: MANIFEST.in
================================================
recursive-include pyroSAR/snap *.xml
recursive-include pyroSAR/snap/data *
recursive-include pyroSAR/ERS/data *
recursive-exclude tests *
recursive-exclude .github *
exclude .travis.yml appveyor.yml
================================================
FILE: README.md
================================================
A Python Framework for Large-Scale SAR Satellite Data Processing
The pyroSAR package aims at providing a complete solution for the scalable organization and processing of SAR satellite data:
* Reading of data from various past and present satellite missions
* Handling of acquisition metadata
* User-friendly access to processing utilities in [SNAP](https://step.esa.int/main/toolboxes/snap/)
and [GAMMA Remote Sensing](https://www.gamma-rs.ch/) software
* Formatting of the preprocessed data for further analysis
* Export to Data Cube solutions
Head on over to [readthedocs](https://pyrosar.readthedocs.io/en/latest/?badge=latest) for installation instructions,
examples and API reference.
================================================
FILE: appveyor.yml
================================================
# thanks a lot to the Nansat project (https://github.com/nansencenter/nansat) from which this file was adapted
environment:
matrix:
- TARGET_ARCH: x64
CONDA_PY: 36
CONDA_INSTALL_LOCN: C:\Miniconda3-x64
GDAL_DATA: C:\Miniconda3-x64\Library\share\gdal
PROJECT_DIR: C:\projects\pyrosar
SNAP_INSTALL: C:\projects\snap
PGUSER: postgres
PGPASSWORD: Password12!
SNAP_VERSION: 10
SNAP_INSTALLER: esa-snap_sentinel_windows-%SNAP_VERSION%.0.0.exe
platform:
- x64
services:
- postgresql96
install:
# Cygwin's git breaks conda-build. (See https://github.com/conda-forge/conda-smithy-feedstock/pull/2.)
- rmdir C:\cygwin /s /q
# install PostGIS
- appveyor DownloadFile https://download.osgeo.org/postgis/windows/pg96/archive/postgis-bundle-pg96-3.2.0x64.zip
- 7z x .\postgis-bundle-pg96-3.2.0x64.zip
- xcopy /e /y /q .\postgis-bundle-pg96-3.2.0x64 C:\Progra~1\PostgreSQL\9.6
# activate conda
- call %CONDA_INSTALL_LOCN%\Scripts\activate.bat
# If there is a newer build queued for the same PR, cancel this one.
- appveyor DownloadFile https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py
- python ff_ci_pr_build.py -v --ci "appveyor" "%APPVEYOR_ACCOUNT_NAME%/%APPVEYOR_PROJECT_SLUG%" "%APPVEYOR_BUILD_NUMBER%" "%APPVEYOR_PULL_REQUEST_NUMBER%"
- del ff_ci_pr_build.py
# update conda
- conda update --yes --quiet conda
- set PYTHONUNBUFFERED=1
# Add our channels.
- conda config --set show_channel_urls true
- conda config --remove channels defaults
- conda config --add channels defaults
- conda config --add channels conda-forge
# install ESA SNAP
- appveyor DownloadFile https://download.esa.int/step/snap/%SNAP_VERSION%_0/installers/%SNAP_INSTALLER%
- start %SNAP_INSTALLER% -q -dir %SNAP_INSTALL%
- set PATH=%PATH%;%SNAP_INSTALL%\bin
- echo %PATH%
# Configure the VM.
- conda env create --file environment-dev.yml
- conda activate ps_test_dev
- pip install .
# Skip .NET project specific build phase.
build: false
test_script:
- coverage run -m pytest
================================================
FILE: datacube_prepare.ipynb
================================================
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"This is a quick notebook to demonstrate the pyroSAR functionality for importing processed SAR scenes into an Open Data Cube"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from pyroSAR.datacube_util import Product, Dataset\n",
"from pyroSAR.ancillary import groupby, find_datasets"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# define a directory containing processed SAR scenes\n",
"dir = '/path/to/some/data'\n",
"\n",
"# define a name for the product YML; this is used for creating a new product in the datacube\n",
"yml_product = './product_def.yml'\n",
"\n",
"# define a directory for storing the indexing YMLs; these are used to index the dataset in the datacube\n",
"yml_index_outdir = './yml_indexing'\n",
"\n",
"# define a name for the ingestion YML; this is used to ingest the indexed datasets into the datacube\n",
"yml_ingest = './ingestion.yml'\n",
"\n",
"# product description\n",
"product_name_indexed = 'S1_GRD_index'\n",
"product_name_ingested = 'S1_GRD_ingest'\n",
"product_type = 'gamma0'\n",
"description = 'this is just some test'\n",
"\n",
"# define the units of the dataset measurements (i.e. polarizations)\n",
"units = 'backscatter'\n",
"# alternatively this could be a dictionary:\n",
"# units = {'VV': 'backscatter VV', 'VH': 'backscatter VH'}\n",
"\n",
"ingest_location = './ingest'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# find pyroSAR files by metadata attributes\n",
"files = find_datasets(dir, recursive=True, sensor=('S1A', 'S1B'), acquisition_mode='IW')\n",
"\n",
"# group the found files by their file basenames\n",
"# files with the same basename are considered to belong to the same dataset\n",
"grouped = groupby(files, 'outname_base')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(len(files))\n",
"print(len(grouped))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"In the next step we create a new product, add the grouped datasets to it and create YML files for indexing the datasets in the cube."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# create a new product and add the collected datasets to it\n",
"# alternatively, an existing product can be used by providing the corresponding product YML file\n",
"with Product(name=product_name_indexed,\n",
" product_type=product_type,\n",
" description=description) as prod:\n",
"\n",
" for dataset in grouped:\n",
" with Dataset(dataset, units=units) as ds:\n",
"\n",
" # add the datasets to the product\n",
" # this will generalize the metadata from those datasets to measurement descriptions,\n",
" # which define the product definition\n",
" prod.add(ds)\n",
"\n",
" # parse datacube indexing YMLs from product and dataset metadata\n",
" prod.export_indexing_yml(ds, yml_index_outdir)\n",
"\n",
" # write the product YML\n",
" prod.write(yml_product)\n",
" \n",
" # print the product metadata, which is written to the product YML\n",
" print(prod)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now that we have a YML file for creating a new product and individual YML files for indexing the datasets, we can create a last YML file, which will ingest the indexed datasets into the cube. For this a new product is created and the files are converted to NetCDF, which are optimised for usage in the cube. The location of those NetCDF files also needs to be defined."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"with Product(yml_product) as prod:\n",
" prod.export_ingestion_yml(yml_ingest, product_name_ingested, ingest_location, \n",
" chunking={'x': 512, 'y': 512, 'time': 1})"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "cubeenv",
"language": "python",
"name": "cubeenv"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
================================================
FILE: docs/Makefile
================================================
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
================================================
FILE: docs/make.bat
================================================
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
exit /b 1
)
if "%1" == "" goto help
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd
================================================
FILE: docs/source/about/changelog.rst
================================================
#########
Changelog
#########
0.6 | 2018-11-20
================
SAR metadata
------------
- new standardized metadata fields `orbitNumber_abs`, `orbitNumber_rel`, `cycleNumber` and `frameNumber` for all SAR
formats
- customization of output file names with additional metadata fields (e.g. orbit numbers)
software configuration
----------------------
- pyroSAR configuration file handling: the paths to the SNAP and Gamma installation as well as relevant metadata
directories are now registered in a configuration file `config.ini`, which is stored in a directory `.pyrosar` in the
user home directory
- improved SNAP installation verification: pyroSAR now performs a deeper check of the SNAP installation to make sure
it is not mistaken with e.g. the Ubuntu package manager snap; relevant installation executables and directories are
stored in the configuration file
general functionality
---------------------
- deeper integration of package `spatialist <https://github.com/johntruckenbrodt/spatialist>`_: all the spatial file
handling functionality that was part of pyroSAR is now part of package spatialist; now all the functionality is imported
from spatialist and removed from pyroSAR
- improved search for datasets processed by pyroSAR: new helper functions exist, which make it easier to search for
datasets by metadata fields, which are internally searched for in the respective file names
- introduced gamma function parser: these new tools search for a GAMMA_HOME environment variable and, if found, parse
Python functions from the docstring of respective command line tools; for this, new Python scripts are created, which
are stored alongside the configuration file in the user home directory; this way users can easily use Python functions
with named parameters instead of the positional arguments of the Gamma command line tools
- improved documentation
Open Data Cube Export
---------------------
functionality to export processed datasets directly to an Open Data Cube:
it is now possible to create Open Data Cube product YML files as well as YML files for data indexing and ingestion
into this product; pyroSAR also internally checks for compatibility of a particular dataset with the target product;
this way, the resulting files can easily be passed to the Open Data Cube command line tools
several bug fixes
SNAP API
--------
improved SNAP processing workflow node linking: it is now possible to add a node also before an existing one, instead
of just after it
Python package integrity
------------------------
- add trove classifiers for supported operating systems and MIT license for easier online search
- exchange http with https for all URLs that support it
0.7 | 2019-01-03
================
several changes to the functioning of the Gamma command API
GAMMA API
---------
processing
++++++++++
- :func:`pyroSAR.gamma.geocode`:
* optionally write all Gamma commands to shellscript
* newly introduced choice of normalization method
* changed normalization default approach
- :func:`pyroSAR.gamma.process`:
* new parameter `logfile` to specify a logfile instead of just a directory with automated file naming
* new parameter `shellscript` to write the executed command to a shell script protocol
command parser
++++++++++++++
- add parameters `outdir` and `shellscript` to parsed functions
- extensive improvement to accurately parse more commands
- add parameter `inlist` to some commands, which require interactive input via `stdin`
general
+++++++
- several bug fixes
- extended documentation
- make use of parsed command functions internally
- enable passing `logpath`, `outdir` and `shellscript` to all parsed functions via additional parameters for other
convenience functions
0.8 | 2019-02-11
================
Auxiliary Data Handling
-----------------------
- new module auxdata with function :func:`pyroSAR.auxdata.dem_autoload` to automatically download tiles of
different DEM types overlapping with given geometries
- class :class:`pyroSAR.S1.OSV`: reduced search time for new RES orbit state vector files;
included more meaningful status messages
GAMMA API
---------
- new function :func:`pyroSAR.gamma.srtm.dem_autocreate` to automatically create DEMs in Gamma format from the output
of function :func:`pyroSAR.auxdata.dem_autoload`
- improved writing of ENVI HDR files from class :class:`pyroSAR.gamma.ISPPar`
- class :class:`pyroSAR.gamma.UTM`: improved to work with newer Gamma versions
- function :func:`pyroSAR.gamma.geocode`:
+ improved documentation
+ clarified code for better readability
+ more consistent naming scheme for all temporarily written files
+ export temporarily written files (e.g. local incidence angle) via new parameter `export_extra`
+ additional parametrization tests to ensure best processing result
+ changed default of parameter `func_interp` to 2 to work best with default of parameter `normalization_method`
(see documentation of Gamma command pixel_area)
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`:
+ export temporarily written files (e.g. local incidence angle) via new parameter `export_extra`
0.9 | 2019-06-15
================
Drivers
-------
- :class:`pyroSAR.drivers.SAFE`: read heading angle, incident angle and image geometry (e.g. Ground Range) from metadata
- :class:`pyroSAR.drivers.Archive`: improved cross-compatibility with Python2 and Python3
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`:
+ option to export `DEM` via parameter `export_extra`
+ added Sentinel-1 `ThermalNoiseRemoval` node via new parameter `removeS1ThermalNoise`
+ added `Multilook` node which is executed to approximate the target resolution if necessary
(currently only for Sentinel-1 since metadata entries `incidence` and `image_geometry` are required)
+ new parameter `groupsize` to split workflows into several groups, which are executed separately with
intermediate products written to disk. This increases processing speed
+ simplified internal node parametrization for easier use in future functions
+ fail if no POE orbit state vector file is found
+ `Terrain-Flattening`:
* added additional parameters `additionalOverlap` and `oversamplingMultiple`
* use bilinear instead of bicubic interpolation
+ `Remove-GRD-Border-Noise`: decrease `borderLimit` from 1000 to 500 (SNAP default)
+ new parameter `gpt_exceptions` to execute workflows containing specific nodes with different GPT versions than
the default one
+ automatically remove node parameters on GPT fail and re-run the modified workflow; this is relevant if a node is
executed in an older GPT version (e.g. via parameter `gpt_exceptions`), which does not accept parameters which were
introduced in later GPT versions (e.g. those described above for node `Terrain-Flattening`)
+ disable/enable terrain flattening via new parameter `terrainFlattening`
+ optionally return workflow filename with new parameter `returnWF`
+ execute custom pyroSAR S1 GRD border noise removal (see :func:`pyroSAR.S1.removeGRDBorderNoise`)
+ new parameters `demResamplingMethod` and `imgResamplingMethod`
GAMMA API
---------
- SRTM Tools renamed to DEM Tools
+ function :func:`pyroSAR.gamma.dem.dem_autocreate`:
* define arbitrary output CRS and resolution via new parameters `t_srs` and `tr`
* optionally perform geoid to ellipsoid conversion in either GDAL or GAMMA via new parameter `geoid_mode`
- function :func:`pyroSAR.gamma.geocode`:
+ removed multiplication of backscatter with cosine of incident angle via command `lin_comb`
+ fixed bug in writing correct nodata values to ancillary products defined via parameter `export_extra`
+ changed default of parameter `func_geoback` from 2 to 1 (GAMMA default)
- function :func:`pyroSAR.gamma.correctOSV`:
+ fixed bug in using the first OSV file in a directory for correcting an image, which resulted in S1B files being
corrected with S1A OSV files. This occasionally resulted in errors of no DEM overlap while processing S1B scenes
- fixed bug in treating GAMMA image pixel coordinates as top left instead of pixel center. This is relevant for writing
ENVI HDR files for GAMMA images via function :func:`pyroSAR.gamma.par2hdr` resulting in the image to be shifted
by 1/2 pixel to Southeast
Command Parser
++++++++++++++
- compatibility with GAMMA version released in November 2018
- delete parsed modules if environment variable `GAMMA_HOME` was reset causing them to be re-parsed with the new version
on module import
general functionality
---------------------
- new function :func:`pyroSAR.ancillary.multilook_factors` to compute factors depending on image geometry and target resolution
- :func:`pyroSAR.S1.removeGRDBorderNoise`: reached Python3 compatibility
Auxiliary Data Handling
-----------------------
- new function :func:`pyroSAR.auxdata.dem_create` for convenient creation of DEM mosaics as downloaded by
:func:`pyroSAR.auxdata.dem_autoload`
- function :func:`pyroSAR.auxdata.dem_autoload`: download 1 degree tiles instead of 5 degree tiles
- class :class:`pyroSAR.S1.OSV`:
+ download files specific to the Sentinel-1 sensor (S1A/S1B) instead of all matching the acquisition time
+ improved time span search, which occasionally resulted in missing OSV files
0.9.1 | 2019-07-05
==================
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_create`: new parameter `resampling_method`
GAMMA API
---------
- function :func:`pyroSAR.gamma.dem.dem_autocreate`: new parameter `resampling_method`
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`: fixed typo of parameter `removeS1BorderNoise`
0.10 | 2019-12-06
=================
Drivers
-------
- method :meth:`~pyroSAR.drivers.ID.bbox`: choose the output vector file format via new parameter `driver` or by
using one of spatialist's supported file name extensions (see :meth:`spatialist.vector.Vector.write`)
- :class:`pyroSAR.drivers.SAFE`
+ new method :meth:`~pyroSAR.drivers.SAFE.quicklook` for writing KMZ quicklooks
+ method :meth:`~pyroSAR.drivers.SAFE.getOSV`: renamed parameter `outdir` to `osvdir`
- :class:`pyroSAR.drivers.Archive`: remove scenes from the database if they cannot be found at their file location.
This is performed at each initialization of an `Archive` object.
GAMMA API
---------
- new parameter `basename_extensions` for adding extra metadata fields to output image names; affects:
+ :func:`pyroSAR.gamma.convert2gamma`
+ :func:`pyroSAR.gamma.geocode`
- :func:`pyroSAR.gamma.correctOSV`: make use of OSV files in SNAP's auxdata structure
- :func:`pyroSAR.gamma.geocode`: made border noise removal optional with new parameter `removeS1BorderNoise`
SNAP API
--------
- workflow parsing
+ improved output XML for better display in SNAP GUI
+ support for nodes with multiple input scenes, e.g. `SliceAssembly`
- SAR processor (function :func:`~pyroSAR.snap.auxil.gpt`)
+ write Sentinel-1 manifest.safe with processing results
+ two methods for border noise removal: `ESA` and `pyroSAR` via new parameter `removeS1BorderNoiseMethod`
- function :func:`pyroSAR.snap.util.geocode`
+ optional speckle filtering with new parameter `speckleFilter`
+ choose the output backscatter reference area (`beta0`/`gamma0`/`sigma0`) with new parameter `refarea`
+ default of parameter `groupsize` changed to 1
+ internally download S1 OSV files
+ internally download SNAP's `EGM96` geoid to `WGS84` ellipsoid DEM conversion lookup table via new function
:func:`pyroSAR.snap.auxil.get_egm96_lookup`
  + support for multi-scene `SliceAssembly`; can be invoked by passing a list of scenes to parameter `infile`
+ new parameter `removeS1BorderNoiseMethod`
+ new parameter `gpt_args` to pass additional arguments to the GPT call
Datacube Tools
--------------
- :meth:`pyroSAR.datacube_util.Product.export_ingestion_yml`: new parameter `chunking`
Auxiliary Data Handling
-----------------------
- OSV download functionality (class :class:`pyroSAR.S1.OSV`)
+ made definition of OSV download directory optional; default is SNAP's auxdata directory
+ organization of downloaded files into SNAP's auxdata structure:
* compression to zip
* sort files into subdirs for sensor, year, month
+ removed method :meth:`~pyroSAR.S1.OSV.update`
Ancillary Tools
---------------
- :func:`pyroSAR.ancillary.parse_datasetname`
+ support for datasets in NetCDF format
+ enable parsing of ancillary products like local incidence angle (\*inc_geo.tif)
- :func:`pyroSAR.ancillary.find_datasets`: new parameters `start` and `stop` for time filtering
general
-------
- bug fixes and documentation improvements
0.10.1 | 2019-12-12
===================
GAMMA API
---------
- :ref:`Command API ` compatibility with GAMMA version 20191203
0.11 | 2020-05-29
=================
Drivers
-------
- :class:`pyroSAR.drivers.Archive`: completely restructured to use the `SQLAlchemy `_
Object Relational Mapper (ORM). This makes it possible to switch between SQLite+Spatialite and PostgreSQL+PostGIS
database backends.
- :meth:`pyroSAR.drivers.SAFE.getOSV`: new argument `returnMatch` to also return the name of an OSV file instead of just
downloading it.
SNAP API
--------
- arbitrary nodes can now be parsed. Before, only a small selection of nodes (those used by function
:func:`~pyroSAR.snap.util.geocode`) were available. Now, any node and its default parametrization can be parsed to XML
from the GPT documentation by internally calling e.g.:
::
gpt Terrain-Flattening -h
The parsed XML representation is saved for faster future reuse. See function :func:`~pyroSAR.snap.auxil.parse_node`
for details. In all cases the standard SNAP file suffix is used for output products, e.g. `_TF` for
`Terrain-Flattening`.
- multi-source nodes like `SliceAssembly` now take any number of sources, not just two.
See class :class:`~pyroSAR.snap.auxil.Node`.
- function :func:`pyroSAR.snap.util.geocode`:
+ new argument `nodataValueAtSea` to decide whether sea areas are masked out.
Depends on the quality of the sea mask in the input DEM.
+ automatically download required Sentinel-1 Orbit State Vector (OSV) files.
+ new argument `allow_RES_OSV` to decide whether to allow usage of the less accurate Sentinel-1 RES OSV files in
case the POE file is not available yet.
+ new argument `demName` to choose the type of the auto-downloaded DEM.
Auxiliary Data Handling
-----------------------
- class :class:`pyroSAR.S1.OSV`:
+ removed progressbar from method :meth:`~pyroSAR.S1.OSV.catch` and made it optional in method
:meth:`~pyroSAR.S1.OSV.retrieve` with new argument `pbar`
general
-------
- bug fixes, new automated tests, documentation improvements
0.11.1 | 2020-07-17
===================
- bug fixes
GAMMA API
---------
- :ref:`Command API ` compatibility with GAMMA version 20200713
0.12 | 2021-02-19
=================
Drivers
-------
- :class:`pyroSAR.drivers.Archive`:
+ new argument `cleanup` to automatically remove missing scenes from database on initialization
+ method :meth:`~pyroSAR.drivers.Archive.insert`: improved insertion speed
+ method :meth:`~pyroSAR.drivers.Archive.select_duplicates`: new argument `value`
+ method :meth:`~pyroSAR.drivers.Archive.get_colnames`: new argument `table` to get column names from arbitrary
tables, not just the main `data` table
+ method :meth:`~pyroSAR.drivers.Archive.drop_element`: option to remove scene from `data` and `duplicates` tables
simultaneously by removing argument `table` and adding argument `with_duplicates`
+ method :meth:`~pyroSAR.drivers.Archive.drop_table`:
* new argument `verbose`
* remove arbitrary tables, not just `data` and `duplicates`
+ method :meth:`~pyroSAR.drivers.Archive.drop_database`: replaced by new function :func:`pyroSAR.drivers.drop_archive`
+ new method :meth:`~pyroSAR.drivers.Archive.add_tables` to add custom tables to a database
+ bug fixes
- :class:`pyroSAR.drivers.CEOS_PSR`:
+ added support for ALOS-1 PALSAR
+ added basic support for Level 1.0 data
- :class:`pyroSAR.drivers.SAFE`:
+ method :meth:`~pyroSAR.drivers.SAFE.getOSV`: new argument `useLocal` to not search online if local matching
files are found
GAMMA API
---------
- :ref:`Command API ` compatibility with GAMMA version 20201216
- function :func:`pyroSAR.gamma.convert2gamma`:
+ renamed argument `S1_noiseremoval` to `S1_tnr` (thermal noise removal)
+ new argument `S1_bnr` (border noise removal)
- function :func:`pyroSAR.gamma.geocode`:
+ new default ``removeS1BorderNoiseMethod='gamma'``
+ renamed argument `tempdir` to `tmpdir`
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`:
+ enable grid alignment with new arguments `alignToStandardGrid`, `standardGridOriginX` and `standardGridOriginY`
+ new argument `tmpdir` to choose the location of temporarily created files
+ bug fixes
- function :func:`pyroSAR.snap.auxil.gpt`:
+ perform custom pyroSAR S1 GRD border noise removal only if IPF<2.9
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_autoload`: return `None` if a VRT was defined
0.12.1 | 2021-03-09
===================
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`:
+ output both sigma0 and gamma0 via argument `refarea`
+ new `export_extra` option 'layoverShadowMask'
- numerous bug fixes and API improvements
Auxiliary Data Handling
-----------------------
- class :class:`pyroSAR.S1.OSV`:
+ download files from https://scihub.copernicus.eu/gnss
0.13 | 2021-09-10
=================
Drivers
-------
- new class :class:`pyroSAR.drivers.EORC_PSR`
- new argument `exist_ok` for ID object unpack methods to enable reuse of already unpacked scenes
- :meth:`pyroSAR.drivers.SAFE.getOSV`: new argument `url_option` to choose between different download URLs
- :class:`pyroSAR.drivers.SAFE` align coordinate sorting of attribute `meta['coordinates']` with CRS description
- :func:`pyroSAR.drivers.identify_many`: disable progressbar by default
GAMMA API
---------
- adaptations to enable processing of :class:`~pyroSAR.drivers.EORC_PSR` data:
+ :func:`pyroSAR.gamma.calibrate`
+ :func:`pyroSAR.gamma.convert2gamma`
+ :func:`pyroSAR.gamma.geocode`
- :func:`pyroSAR.gamma.geocode`:
+ experimental optional refinement of the geocoding lookup table with new argument `refine_lut`
+ removed arguments `normalization_method`, `func_interp`, `removeS1BorderNoise`, `sarSimCC`
+ limit radiometric normalization to RTC correction method
+ simplify and improve computation of RTC contribution area
+ file suffices `pan` and `norm` have been replaced with `gamma0-rtc`
+ argument `export_extra` options:
* removed `pix_geo`
* renamed `pix_fine` to `pix_ratio`
    * added `pix_area_sigma0`, `pix_area_sigma0_geo`, `pix_area_gamma0_geo`, `gs_ratio`, `gs_ratio_geo`, `pix_ratio_geo`
+ use a dedicated temporary directory to unpack the scene and write GAMMA files so that they are separated (the GAMMA
files used to be written to the unpacked scene's directory)
+ enable multiple scenes as input so that they can be mosaiced in SAR geometry before geocoding
- :func:`pyroSAR.gamma.correctOSV`: new argument `directory`
- :func:`pyroSAR.gamma.multilook`: new argument `exist_ok`
- :func:`pyroSAR.gamma.convert2gamma`: new argument `exist_ok`
- function :func:`pyroSAR.gamma.dem.dem_autocreate`:
+ do not apply an extent buffer by default
+ allow geometry in arbitrary CRS
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`:
+ new `export_extra` option `scatteringArea`
- extended support for `BandMaths` operator
Auxiliary Data Handling
-----------------------
- method :meth:`pyroSAR.S1.OSV.catch`: new argument `url_option` with two download URLs to choose from
- function :func:`pyroSAR.auxdata.dem_autoload`:
+ added new DEM option `GETASSE30`
+ align pixels of subsetted VRT with original tiles
- function :func:`pyroSAR.auxdata.dem_create`:
+ new argument `outputBounds`
general
-------
- replaced print messages with logging. This made the `verbose` argument that was used by several functions and
methods obsolete; affects the following:
+ :func:`pyroSAR.drivers.identify_many`: replaced by argument `pbar`
+ :meth:`pyroSAR.drivers.Archive.add_tables`: removed
+ :meth:`pyroSAR.drivers.Archive.drop_table`: removed
+ :meth:`pyroSAR.drivers.Archive.insert`: replaced by argument `pbar`
+ :meth:`pyroSAR.drivers.Archive.import_outdated`: removed
+ :meth:`pyroSAR.drivers.Archive.move`: replaced by argument `pbar`
+ :meth:`pyroSAR.drivers.Archive.select`: removed
+ :func:`pyroSAR.snap.auxil.execute`: removed
See section :doc:`Logging ` for details.
0.14.0 | 2021-10-12
===================
Drivers
-------
- raise more appropriate errors (`c430c59 `_)
- :func:`pyroSAR.drivers.findfiles`: removed (functionality contained in :meth:`pyroSAR.drivers.ID.findfiles`,
now making use of :func:`spatialist.ancillary.finder`)
- :meth:`pyroSAR.drivers.Archive.select`:
+ show progressbar for scene identification if ``pbar=True``
+ enabled input of :obj:`~datetime.datetime` objects for arguments ``mindate`` and ``maxdate``
- :func:`pyroSAR.drivers.identify_many`: issue a warning when a file cannot be accessed
(instead of raising a :obj:`PermissionError`)
GAMMA API
---------
- :func:`pyroSAR.gamma.dem.dem_autocreate`: support for new DEM options provided by :func:`pyroSAR.auxdata.dem_autoload`
SNAP API
--------
- :func:`pyroSAR.snap.auxil.get_egm96_lookup` removed in favor of new function :func:`pyroSAR.auxdata.get_egm_lookup`
Auxiliary Data Handling
-----------------------
- method :meth:`pyroSAR.S1.OSV.retrieve`: thread-safe writing of orbit files
- new function :func:`pyroSAR.auxdata.get_egm_lookup`
- function :func:`pyroSAR.auxdata.dem_create`
+ new geoid option 'EGM2008'
+ make use of :func:`~pyroSAR.auxdata.get_egm_lookup` for auto-download of EGM lookup files
+ several bug fixes related to vertical CRS transformation
+ bug fix for target pixel alignment
- function :func:`pyroSAR.auxdata.dem_autoload`: new DEM options:
+ 'Copernicus 10m EEA DEM'
+ 'Copernicus 30m Global DEM'
+ 'Copernicus 90m Global DEM'
general
-------
- replaced http URLs with https where applicable
- improved documentation
0.15.0 | 2022-01-04
===================
Drivers
-------
- :meth:`pyroSAR.drivers.ID.geometry`: new method
GAMMA API
---------
- :ref:`Command API ` compatibility with GAMMA version 20211208
- renamed argument `resolution` to `spacing`; affects:
+ :func:`pyroSAR.gamma.geocode`
+ :func:`pyroSAR.gamma.ovs`
+ :func:`pyroSAR.gamma.multilook`
- function :func:`pyroSAR.gamma.calibrate`
+ removed argument `replace`
+ added argument `return_fnames`
- function :func:`pyroSAR.gamma.convert2gamma`
+ added argument `return_fnames`
- function :func:`pyroSAR.gamma.multilook`
+ pass multiple Sentinel-1 sub-swaths to argument `infile` which are then
combined into a single MLI using GAMMA command `isp.multi_look_ScanSAR`
- class :class:`pyroSAR.gamma.ISPPar`:
+ new object attribute `filetype` with possible values 'isp' and 'dem'
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`:
+ enabled SLC processing
+ enable processing of sigma nought RTC
+ new `export_extra` argument `gammaSigmaRatio`
+ simplified workflow by writing layover-shadow mask directly from `Terrain-Correction`
+ changed processing node sequence:
* was: Read->ThermalNoiseRemoval->SliceAssembly->Remove-GRD-Border-Noise->Calibration
* is: Read->Remove-GRD-Border-Noise->Calibration->ThermalNoiseRemoval->SliceAssembly
+ new output image naming scheme, e.g.
* S1A__IW___A_20210914T191350_VV_gamma0-rtc.tif
* S1A__IW___A_20210914T191350_VH_sigma0-elp.tif
- function :func:`pyroSAR.snap.auxil.gpt`:
+ removed argument `multisource`
+ added argument `tmpdir`
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_autoload`:
+ updated version of 'Copernicus 10m EEA DEM' from '2020_1' to '2021_1'
+ new DEM options:
* 'Copernicus 30m Global DEM II'
* 'Copernicus 90m Global DEM II'
general
-------
- compatibility with sqlalchemy>=1.4
0.15.1 | 2022-01-07
===================
general
-------
- bug fixes
0.16.0 | 2022-03-03
===================
Drivers
-------
- :class:`pyroSAR.drivers.BEAM_DIMAP`: new driver supporting SNAP's BEAM-DIMAP format
- :class:`pyroSAR.drivers.SAFE`:
+ corrected SLC metadata (was read from first sub-swath, now from center sub-swath or as sum of all sub-swaths):
center: spacing, heading, incidence; sum: samples, lines
+ new property :attr:`pyroSAR.drivers.SAFE.resolution`
Auxiliary Data Handling
-----------------------
- create water body mask mosaics from ancillary DEM products. Affects the following:
+ function :func:`pyroSAR.auxdata.dem_autoload`: new arguments `nodata` and `hide_nodata`
- function :func:`pyroSAR.auxdata.dem_create`:
+ new arguments `pbar` and `threads`
SNAP API
--------
- new method :meth:`pyroSAR.snap.auxil.Par_BandMath.add_equation`
- new function :func:`pyroSAR.snap.util.noise_power`
- new function :func:`pyroSAR.snap.auxil.erode_edges`
- function :func:`pyroSAR.snap.auxil.writer`:
+ new arguments `clean_edges` and `clean_edges_npixels`
(to make use of function :func:`~pyroSAR.snap.auxil.erode_edges`)
+ enabled conversion of BEAM-DIMAP files
- function :func:`pyroSAR.snap.util.geocode`:
+ new arguments `clean_edges` and `clean_edges_npixels` (see function :func:`~pyroSAR.snap.auxil.writer`)
+ renamed argument `tr` to `spacing`
+ new arguments `rlks` and `azlks` to manually set the number of looks
GAMMA API
---------
- function :func:`pyroSAR.gamma.geocode`:
+ new arguments `rlks` and `azlks`
- function :func:`pyroSAR.gamma.multilook`:
+ new arguments `rlks` and `azlks`
general
-------
- correction of multi-look factor computation. Before: approximate target pixel spacing but never exceed it.
Now: first best approximate the azimuth spacing as close as possible (even if this means exceeding the target spacing)
and then choose the range looks to approximate a square pixel as close as possible. API changes:
+ function :func:`pyroSAR.ancillary.multilook_factors`:
* renamed argument `sp_rg` to `source_rg`
* renamed argument `sp_az` to `source_az`
* replaced arguments `tr_rg` and `tr_az` with unified `target`
0.16.1 | 2022-03-07
===================
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.get_egm_lookup`:
+ changed URL for PROJ geoid models, which results in better performance for
function :func:`pyroSAR.auxdata.dem_create`
(See `pyroSAR#200 `_).
0.16.2 | 2022-03-14
===================
SNAP API
--------
- function :func:`pyroSAR.snap.util.noise_power`: added missing orbit state vector refinement
0.16.3 | 2022-03-23
===================
SNAP API
--------
- function :func:`pyroSAR.snap.util.noise_power`: pass argument `cleanup` to :func:`~pyroSAR.snap.auxil.gpt` call
- function :func:`~pyroSAR.snap.auxil.gpt`: shortened names of temporary directories
- function :func:`~pyroSAR.snap.auxil.erode_edges`: fixed bug in polygon selection
- function :func:`~pyroSAR.snap.auxil.writer`: do not erode edges of layover-shadow mask
0.17.0 | 2022-05-30
===================
SNAP API
--------
- function :func:`pyroSAR.snap.erode_edges`: reuse mask for all images
GAMMA API
---------
- new function :func:`pyroSAR.gamma.dem.dem_import`
- function :func:`pyroSAR.gamma.geocode`:
+ new argument `update_osv`
general
-------
- full support for Sentinel-1 stripmap mode; renamed `SM` naming pattern to `S1..S6` to distinguish between the different beams
- bug fixes
0.17.2 | 2022-06-23
===================
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_create`:
  + use maximum possible value of `dtype` (e.g. 255 for uint8) instead of -32767.0 if the nodata value cannot be read from the source file
+ always use the same value for source and destination nodata
0.17.3 | 2022-07-03
===================
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_create`:
+ In case the nodata value could not be read from the source file, the function used to define a value itself, which is prone to errors. This value now needs to be set by a user via new argument `nodata` if it cannot be read from the source file.
+ bug fix: no longer try to download 'Copernicus 30m Global DEM' or 'Copernicus 90m Global DEM' tiles that don't exist.
- function :func:`pyroSAR.auxdata.dem_autoload`:
+ new argument `dst_nodata`. This can be used to temporarily override the native nodata value for extrapolation of ocean areas (in combination with ``hide_nodata=True``).
0.18.0 | 2022-08-24
===================
Drivers
-------
- method :meth:`pyroSAR.drivers.SAFE.quicklook`: new argument `na_transparent`
- new class :class:`~pyroSAR.drivers.TDM`
- method :meth:`pyroSAR.drivers.TSX.getCorners`: fixed bug in longitude computation
- class :class:`~pyroSAR.drivers.ESA`: improved support for ERS and ASAR
GAMMA API
---------
- :ref:`Command API ` compatibility with GAMMA version 20220629
SNAP API
--------
- compatibility with SNAP version 9
- function :func:`~pyroSAR.snap.util.geocode`: improved support for ERS and ASAR
0.19.0 | 2022-09-28
===================
Drivers
-------
- class :class:`pyroSAR.drivers.ESA`: added support for ASAR WSM
SNAP API
--------
- new convenience functions:
+ :func:`pyroSAR.snap.auxil.geo_parametrize`
+ :func:`pyroSAR.snap.auxil.sub_parametrize`
+ :func:`pyroSAR.snap.auxil.mli_parametrize`
+ :func:`pyroSAR.snap.auxil.dem_parametrize`
- function :func:`pyroSAR.snap.auxil.orb_parametrize`: removed args `workflow`, `before`, `continueOnFail`; added `kwargs`
- function :func:`pyroSAR.snap.auxil.erode_edges`: extended to also take a BEAM-DIMAP product as input or a folder of multiple ENVI files (and not just an individual ENVI file)
- function :func:`pyroSAR.snap.auxil.Workflow.insert_node`: option to insert multiple nodes at once
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_autoload`:
+ new argument `crop` to optionally return the full extent of all overlapping DEM tiles
+ added download status print messages
+ download and modify a Copernicus DEM index file for future reuse; this removes the need to search the FTP server for files and thus greatly accelerates the process of collecting all files overlapping with the AOI
0.20.0 | 2022-12-27
===================
Drivers
-------
- class :class:`pyroSAR.drivers.ESA`: changed ASAR orbit type from DELFT to DORIS
- class :class:`pyroSAR.drivers.BEAM_DIMAP`: new attributes `meta['incidence']` and `meta['image_geometry']`
- class :class:`pyroSAR.drivers.Archive`: new argument `date_strict` for method :meth:`~pyroSAR.drivers.Archive.select`
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`: force multi-looking for ERS1, ERS2, ASAR even if range and azimuth factor are both 1
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_autoload`:
+ no longer require DEM tiles for creating a mosaic to address ocean cases
+ simplified handling and removed arguments `nodata`, `dst_nodata` and `hide_nodata`
+ the DEM option 'Copernicus 30m Global DEM' now also includes several auxiliary layers that can be downloaded automatically
+ the URLs for DEM options 'SRTM 3Sec' and 'TDX90m' have been updated
- function :func:`pyroSAR.auxdata.dem_create`:
+ option to customize the output DEM via additional keyword arguments to be passed to :func:`spatialist.auxil.gdalwarp`
+ no longer require a nodata value
0.21.0 | 2023-05-11
===================
Drivers
-------
- class :class:`pyroSAR.drivers.Archive`:
+ improved PostgreSQL connection stability
+ method :meth:`~pyroSAR.drivers.Archive.select`: the `vectorobject` geometry is now cloned before being reprojected to EPSG:4326 so that the source geometry remains unaltered
GAMMA API
---------
- the `LAT` module is no longer needed: new pyroSAR-internal implementations can be used if the module is missing (concerns commands `product`, `ratio` and `linear_to_dB`)
- improved backwards compatibility:
+ use `multi_look_ScanSAR` if present and `multi_S1_TOPS` otherwise
+ use `gc_map2` if possible (present and with all needed arguments) and `gc_map` otherwise
+ addressed the case where `gc_map` does not have an argument `OFF_par`
- function `gamma.pixel_area_wrap`: new argument `exist_ok` (this function will be made more visible in the documentation once matured)
- bug fixes:
+ :func:`pyroSAR.gamma.convert2gamma`: raise an error if `S1_bnr=True` but the GAMMA command does not support border noise removal
+ :func:`pyroSAR.gamma.geocode`: removed unneeded underscore in HDR file naming
+ `gamma.pixel_area_wrap`: fixed some issues with occasionally missing intermediate files, e.g. for computing ratios
SNAP API
--------
- function :func:`pyroSAR.snap.util.geocode`: new argument `dem_oversampling_multiple` with default 2 to increase the DEM oversampling factor for terrain flattening
- function :func:`pyroSAR.snap.auxil.erode_edges`:
+ do not attempt to perform erosion if the image only contains nodata (this might happen if only parts of the image were geocoded)
+ make sure that a backscatter image is used for erosion (auxiliary data like the local incidence angle often has a larger valid data extent and using such image for erosion would thus not properly erode edges of the backscatter images; additionally this has the effect that all images will have the same valid data extent after erosion)
+ the written mask files (delineating valid data and nodata after erosion of the backscatter image and used for masking all other images) are now compressed (deflate) so that data volume is decreased significantly
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_create`:
+ new argument `resampleAlg` to change the resampling algorithm
0.22.0 | 2023-09-21
===================
Drivers
-------
- class :class:`pyroSAR.drivers.Archive`:
+ allow multiple products with same `outname_base`, e.g. Sentinel-1 GRD and SLC; this required the introduction of a second primary key in the database
+ method :meth:`~pyroSAR.drivers.Archive.import_outdated`: option to import data from an old database with only one primary key; this requires the old
database to be opened in legacy mode (new argument `legacy=True`)
- class :class:`pyroSAR.drivers.SAFE`: support for handling Sentinel-1 OCN products (metadata reading and database handling)
Auxiliary Data Handling
-----------------------
- class :class:`pyroSAR.auxdata.DEMHandler`: enabled handling of southern hemisphere geometries.
0.22.1 | 2023-10-11
===================
Drivers
-------
- class :class:`pyroSAR.drivers.BEAM_DIMAP`: enable calling inherited method :meth:`~pyroSAR.drivers.ID.geometry`
0.22.2 | 2023-11-16
===================
SNAP API
--------
- function :func:`pyroSAR.snap.auxil.writer`: fixed bug in ignoring `erode_edges` argument
- function :func:`pyroSAR.snap.auxil.erode_edges`: enable handling of polarimetric matrices
Drivers
-------
- function :func:`pyroSAR.drivers.identify`: enable reading of :class:`~pyroSAR.drivers.TDM` products
Misc
----
- class :class:`pyroSAR.examine.ExamineGamma`: enhanced flexibility in finding GAMMA installation
0.23.0 | 2023-11-23
===================
Drivers
-------
- class :class:`pyroSAR.drivers.Archive`: fixed bug in loading spatialite on Darwin-based systems
Auxiliary Data Handling
-----------------------
changes to Sentinel-1 OSV data handling:
- method :meth:`pyroSAR.S1.OSV.catch`:
+ removed `url_option` 1 (https://scihub.copernicus.eu/gnss)
+ made option 2 the new default option 1 (https://step.esa.int/auxdata/orbits/Sentinel-1)
- added new arguments to the following functions:
+ :func:`pyroSAR.gamma.correctOSV`: `url_option`
+ :func:`pyroSAR.gamma.geocode`: `s1_osv_url_option`
+ :func:`pyroSAR.snap.auxil.orb_parametrize`: `url_option`
+ :func:`pyroSAR.snap.util.geocode`: `s1_osv_url_option`
+ :func:`pyroSAR.snap.util.noise_power`: `osv_url_option`
0.24.0 | 2024-01-10
===================
Drivers
-------
- new base attribute `coordinates`
- enable method :meth:`~pyroSAR.drivers.ID.geometry` for all driver classes
- classes :class:`~pyroSAR.drivers.ESA` and :class:`~pyroSAR.drivers.CEOS_ERS`: removed call to `gdalinfo`
(for increased test capability and speed)
- outsourced regular expressions for product identification into separate module `patterns`
Auxiliary Data Handling
-----------------------
- method :meth:`pyroSAR.S1.OSV.catch`: fixed bug in finding files starting in previous month
0.25.0 | 2024-04-16
===================
Drivers
-------
- class :class:`pyroSAR.drivers.Archive`:
+ replaced column `bbox` with `geometry`; requires database migration
+ method :meth:`~pyroSAR.drivers.Archive.export2shp`: improved column name laundering
SNAP API
--------
- function :func:`pyroSAR.snap.auxil.gpt`: fixed bug that occurred during removal of BNR node
Ancillary Tools
---------------
- new classes :class:`pyroSAR.ancillary.Lock` and :class:`pyroSAR.ancillary.LockCollection`
for custom file/folder locking
Auxiliary Data Handling
-----------------------
- function :func:`pyroSAR.auxdata.dem_create`:
+ make use of new classes :class:`~pyroSAR.ancillary.Lock` and :class:`~pyroSAR.ancillary.LockCollection`
for DEM download and mosaic creation (new argument `lock_timeout`)
+ check whether all VRT source files exist
0.26.0 | 2024-05-15
===================
SNAP API
--------
- compatibility with SNAP 10.
- completely revised configuration mechanisms. See
+ :doc:`/general/configuration`
+ :class:`pyroSAR.examine.ExamineSnap`
+ :class:`pyroSAR.examine.SnapProperties`
0.26.1 | 2024-10-01
===================
Drivers
-------
- method :meth:`pyroSAR.drivers.Archive.select`: do not accept multi-feature vectorobjects
SNAP API
--------
- fixed bug in writing SNAP properties configuration
Auxiliary Data Handling
-----------------------
- class :class:`pyroSAR.auxdata.DEMHandler`: lock created VRT files
0.27.0 | 2024-12-19
===================
Auxiliary Data Handling
-----------------------
- class :class:`pyroSAR.S1.OSV`: fixed bug in searching STEP OSV repository
- function :func:`pyroSAR.auxdata.dem_create`: removed argument `lock_timeout`; the target file is no longer locked.
- function :func:`pyroSAR.auxdata.dem_autoload`: the target VRT file is no longer locked. However, the individual downloaded DEM tiles now are.
Ancillary Tools
---------------
- classes :class:`~pyroSAR.ancillary.Lock` and :class:`~pyroSAR.ancillary.LockCollection`:
enable nested locking
Misc
----
- removed upper Python dependency limit
0.28.0 | 2025-02-20
===================
General
-------
- support for SNAP 11 (tested, no modifications necessary)
- support for Sentinel-1C and D
Drivers
-------
- function :func:`pyroSAR.drivers.identify_many`: new argument `cores` for parallel scene identification
- class :class:`pyroSAR.drivers.SAFE`: enable unzipping of products from CDSE
Auxiliary Data Handling
-----------------------
- removed option for `TDX90m` DEM download because the FTP server has been shut down
(perhaps reactivated in the future if HTTPS authentication can be implemented)
0.29.0 | 2025-04-09
===================
General
-------
- extended support for Sentinel-1C and D
Drivers
-------
- :meth:`pyroSAR.drivers.SAFE.geo_grid`: new method
0.29.1 | 2025-05-12
===================
SNAP API
--------
- support for SNAP 12
0.30.0 | 2025-05-14
===================
Drivers
-------
- changed polygon coordinate order to counter-clockwise for methods
- :meth:`pyroSAR.drivers.ID.bbox`
- :meth:`pyroSAR.drivers.ID.geometry`
- method :meth:`pyroSAR.drivers.Archive.select`: new argument `return_value`
0.30.1 | 2025-08-22
===================
Drivers
-------
- :class:`~pyroSAR.drivers.ESA`: read all `GEOLOCATION GRID ADS` segments to obtain GCPs, not just the first one (bugfix)
GAMMA API
---------
- support for GAMMA version 20250625
- support for polar stereographic projections (via :meth:`~pyroSAR.gamma.auxil.ISPPar.envidict`)
- class :class:`~pyroSAR.gamma.auxil.ISPPar`: raise error if file type is unknown
(instead of setting the `filetype` attribute to `unknown`)
- :func:`~pyroSAR.gamma.util.pixel_area_wrap`:
+ create ENVI HDR files for inputs to :func:`~pyroSAR.gamma.util.lat_ratio` (bugfix)
+ fixed bug in ignoring conditions for writing ENVI HDR files of `pix*` and `gs_ratio` products
- improved readability of tests
0.31.0 | 2025-09-23
===================
Drivers
-------
- :meth:`pyroSAR.drivers.ID.bbox`: new argument `buffer`
- :class:`~pyroSAR.drivers.SAFE`, :class:`~pyroSAR.drivers.BEAM_DIMAP`: new argument `looks`
- :class:`~pyroSAR.drivers.Archive`: context-manage all database handles (code improvement)
GAMMA API
---------
- :func:`~pyroSAR.gamma.util.convert2gamma`, :func:`~pyroSAR.gamma.util.correctOSV`: add file locking
- fixed argument names of `isp.MLI_cat`
0.32.0 | 2025-10-29
===================
SNAP API
--------
- :func:`~pyroSAR.snap.auxil.orb_parametrize`: improved ERS/ASAR orbit handling (more work necessary to always select the best available file, because all options are limited in time (e.g. use option 1 if possible, fall back to option 2 otherwise, etc.); needs a download functionality like :class:`pyroSAR.S1.auxil.OSV` to know which ones are available)
- :func:`~pyroSAR.snap.util.geocode`:
+ explicitly use 'Latest Auxiliary File' for Envisat calibration (just for readability, this is already the default value of the parsed node; other options: 'Product Auxiliary File', 'External Auxiliary File')
+ leave calibration node polarizations field empty when processing all polarizations (otherwise processing may finish without errors but no product is being written; looks like a SNAP bug, also reported in `step-44830 `_)
+ `Calibration` in/out band handling improvements
* select source bands based on sensor and acquisition mode (also described in `step-44830 `_)
* more explicit handling of output bands: all that are not needed set to `False`
* commented out output bands that are apparently not needed
+ fixed sarsim-cc geocoding:
* old: `SAR-Simulation->Cross-Correlation->Terrain-Flattening->SARSim-Terrain-Correction` (does not work because `Terrain-Flattening` does not pass through any source layers)
* new: `SAR-Simulation->Cross-Correlation->Warp->Terrain-Flattening->Terrain-Correction`
* this reveals a flaw in current SNAP processing: the additional `Warp` step introduces unnecessary resampling, the created lookup table is not passed between operators and thus makes the process inefficient, the whole procedure only works with EPSG:4326 as map geometry thus, by the looks of it, requiring three forward geocoding steps (for `SAR-Simulation`, `Terrain-Flattening` and `Terrain-Correction`, respectively)
- :func:`~pyroSAR.snap.auxil.groupbyWorkers`: add `Warp` operator to the group of its source node, because it cannot be executed alone (just like `ThermalNoiseRemoval`)
- ancillary layer writing fix: a layover-shadow-mask can also be created by `SAR-Simulation`, but the output layer is named differently ('layover_shadow_mask' instead of 'layoverShadowMask' by `Terrain-Correction`); this must be handled correctly in :func:`pyroSAR.snap.auxil.writer`
Drivers
-------
- :class:`~pyroSAR.drivers.ESA`:
+ :meth:`~pyroSAR.drivers.ESA.scanMetadata`:
* read out all MPH, SPH, DSD and GEOLOCATION_GRID_ADS metadata and expose it via `meta['origin']`
* use absolute orbit number as `frameNumber` instead of product counter (which often seems to be 0)
* convert original metadata to Python types (int, float, datetime)
* renamed several meta attributes:
- `incidenceAngleMin` -> `incidence_nr`
- `incidenceAngleMax` -> `incidence_fr`
- `rangeResolution`, `azimuthResolution` -> `resolution` (tuple)
- `neszNear`, `neszFar` -> `nesz` (tuple)
+ new method :meth:`~pyroSAR.drivers.ESA.geo_grid` (like for `SAFE`)
+ corrected `acquisition_mode` for ASAR WSM, WSS
+ added MR product type
- :class:`~pyroSAR.drivers.BEAM_DIMAP`
+ improved metadata parsing
* `incidenceAngleMidSwath` not always present, use `incidence_near` and `incidence_far` alternatively
* the cycle number may be named `orbit_cycle` or `CYCLE`
* for pyroSAR `frameNumber`, use `ABS_ORBIT`, not `data_take_id` as for Sentinel-1
* added further `meta` attributes: `swath`, `looks`
* always four `Polarizations` fields present, some may be set to None -> filtered out
* for Sentinel-1 the product and acquisition_mode attributes can be obtained from `ACQUISITION_MODE` and `PRODUCT_TYPE` respectively; for ASAR/ERS `ACQUISITION_MODE` is missing and `PRODUCT_TYPE` contains the original values, e.g. 'ASA_APP_1P' -> must be abstracted
+ added MR product type
- :class:`~pyroSAR.drivers.ID`
+ added methods `start_dt` and `stop_dt` returning timezone-aware datetime objects
Ancillary Tools
---------------
- :meth:`~pyroSAR.ancillary.multilook_factors`: fixed bug in returning 0 as range factor
0.32.1 | 2025-11-06
===================
Auxiliary Data Handling
-----------------------
- class :class:`pyroSAR.S1.OSV`: lock local target files for download (to avoid multi-download and conflicts in parallel processes)
0.33.0 | 2025-12-17
===================
Drivers
-------
- :class:`~pyroSAR.drivers.ESA`:
+ convert coordinates in `meta['origin']` to floats
+ read incident angles directly from metadata, not from custom mapping `ANGLES_RESOLUTION` (from which they have been removed)
+ `ERS.mapping` renaming:
* `ANGLES_RESOLUTION` -> `RESOLUTION_NESZ`
* `get_angles_resolution` -> `get_resolution_nesz`
* `range` -> `res_rg`
* `azimuth` -> `res_az`
* `nesz_near` -> `nesz_nr`
* `nesz_far` -> `nesz_fr`
+ made code more robust by reading SPH and DSD sizes from MPH
+ added WSS mode to `RESOLUTION_NESZ` (although all values are just `None` because they could not be found yet)
+ simplified code and added typing
- :class:`~pyroSAR.drivers.BEAM_DIMAP`:
+ more robust incident angle reading
SNAP API
--------
- support for SNAP 13
Ancillary Tools
---------------
- :meth:`~pyroSAR.ancillary.multilook_factors`: complete reimplementation for more robustness
Auxiliary Data Handling
-----------------------
- class :class:`pyroSAR.auxdata.DEMHandler`: handle ocean areas without DEM coverage using a dummy DEM spanning the target extent instead of the whole globe. The latter is no longer supported by GDAL.
0.33.1 | 2026-01-19
===================
Drivers
-------
- :meth:`pyroSAR.drivers.SAFE.geo_grid`: fixed datetime handling bug by requiring spatialist>=0.16.2
0.33.2 | 2026-01-21
===================
Auxiliary Data Handling
-----------------------
- :meth:`S1.OSV.__catch_step_auxdata`: do not stop if no file was found on the first URL
0.33.3 | 2026-01-30
===================
GAMMA API
---------
- :class:`pyroSAR.gamma.auxil.ISPPar`: fixed `date` attribute handling
0.34.0 | 2026-02-12
===================
Drivers
-------
- :class:`~pyroSAR.drivers.CEOS_PSR`: add new `meta` attributes `heading` and `heading_scene`
Auxiliary Data Handling
-----------------------
- enable global search (the parameter `geometries` is now optional)
- generation of local indices to reduce web traffic
- option to work in offline mode
Ancillary Tools
---------------
- class :class:`~pyroSAR.ancillary.Lock`: fixed bug where lock file would remain on error if target does not exist
SNAP API
--------
- :meth:`pyroSAR.examine.ExamineSnap.get_version`: more robust mechanism to read version information.
Only the version is returned as string now (instead of a dictionary with version and release date).
- :meth:`pyroSAR.examine.SnapProperties`: support for `snap.conf` files
0.34.1 | 2026-02-12
===================
SNAP API
--------
- :class:`pyroSAR.examine.ExamineSnap`: restore Python 3.10 compatibility (f-string parsing issue)
0.34.2 | 2026-02-13
===================
Ancillary Tools
---------------
- restored Python 3.10 compatibility (import `typing_extensions.Self` instead of `typing.Self` if necessary)
0.34.3 | 2026-02-17
===================
SNAP API
--------
- :class:`pyroSAR.examine.ExamineSnap`: do not call SNAP to read version info in `__init__`
Auxiliary Data Handling
-----------------------
- handle empty URL lists in `DEMHandler.__retrieve`
0.34.4 | 2026-03-03
===================
SNAP API
--------
- :func:`pyroSAR.snap.auxil.erode_edges`: explicitly open BEAM-DIMAP .img files with the ENVI driver.
This was necessary because GDAL 3.12 introduces a new `MiraMonRaster` driver, which is used per default for .img files.
Drivers
-------
- use `MEM` instead of `Memory` as driver for creating in-memory :class:`spatialist.vector.Vector` objects. `Memory` has been deprecated.
0.34.5 | 2026-03-06
===================
SNAP API
--------
- :meth:`pyroSAR.examine.ExamineSnap.get_version`: fixed bug where the X11 environment variable `DISPLAY` was preventing SNAP from starting
GAMMA API
---------
- handle subprocess signal kills like segmentation fault (SIGSEGV). Before these were just passed through, now a `RuntimeError` is raised.
0.35.0 | 2026-03-09
===================
Archive
-------
- new module :mod:`pyroSAR.archive` extracted from :mod:`pyroSAR.drivers`
- new protocol class :class:`pyroSAR.archive.SceneArchive` to establish an interface for scene search classes (inherited by :class:`pyroSAR.archive.Archive`).
- method `Archive.encode` has been renamed to :meth:`~pyroSAR.archive.Archive.to_str` and has been reimplemented to be more predictable
Drivers
-------
- :class:`~pyroSAR.drivers.ID`: deleted method `export2sqlite`
0.36.0 | 2026-03-10
===================
GAMMA API
---------
- :func:`pyroSAR.gamma.dem.dem_import`:
+ add `shellscript` argument
+ consistently pass `logpath`, `outdir` and `shellscript` to GAMMA commands
- :func:`pyroSAR.gamma.auxil.process`:
+ replace environment variable `base` in the `shellscript` with `OUTDIR` and corrected its usage.
Before, the value of `outdir` in the command was just replaced with `$base`.
This led to wrong scripts whenever different values for `outdir` were passed to `process`.
Now, no global variable is set and `OUTDIR` is redefined whenever the value of `outdir` changes, e.g.
.. code-block:: bash
OUTDIR=/xyz
command1 $OUTDIR
command2 $OUTDIR
OUTDIR=/abc
command3 $OUTDIR
+ bugfix: the file header and the declaration of `GAMMA_HOME` are now written to the file even if `outdir=None`
0.36.1 | 2026-03-24
===================
GAMMA API
---------
- :func:`pyroSAR.gamma.util.convert2gamma`: fix error in not removing thermal noise due to GAMMA interface change
================================================
FILE: docs/source/about/projects.rst
================================================
######################
Projects using pyroSAR
######################
pyroSAR is/was used in these projects:
- `BACI `_
- `CCI Biomass `_
- `COPA `_
- `EMSAfrica `_
- `GlobBiomass `_
- `SALDi `_
- `SenThIS `_
- `Sentinel4REDD `_
- `SWOS `_
- `BONDS `_
Do you know of other projects? We'd be happy to hear about them.
================================================
FILE: docs/source/about/publications.rst
================================================
############
Publications
############
.. bibliography::
:style: plain
:list: bullet
:filter: author % "Truckenbrodt"
================================================
FILE: docs/source/about/references.rst
================================================
.. only:: html or text
References
==========
.. bibliography::
:style: plain
================================================
FILE: docs/source/api/ancillary.rst
================================================
Ancillary Functions
===================
.. automodule:: pyroSAR.ancillary
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
find_datasets
getargs
groupby
groupbyTime
hasarg
multilook_factors
parse_datasetname
seconds
Lock
LockCollection
================================================
FILE: docs/source/api/archive.rst
================================================
Archive
=======
.. automodule:: pyroSAR.archive
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
Archive
drop_archive
================================================
FILE: docs/source/api/auxdata.rst
================================================
Auxiliary Data Tools
====================
.. automodule:: pyroSAR.auxdata
:members: dem_autoload, dem_create, get_egm_lookup, getasse30_hdr, get_dem_options, DEMHandler
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
dem_autoload
dem_create
get_egm_lookup
getasse30_hdr
get_dem_options
DEMHandler
================================================
FILE: docs/source/api/datacube.rst
================================================
Datacube Tools
==============
.. automodule:: pyroSAR.datacube_util
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/source/api/drivers.rst
================================================
Drivers
=======
.. automodule:: pyroSAR.drivers
:members:
:undoc-members:
:show-inheritance:
.. rubric:: classes
.. autosummary::
:nosignatures:
ID
BEAM_DIMAP
CEOS_PSR
CEOS_ERS
EORC_PSR
ESA
SAFE
TSX
TDM
.. rubric:: functions
.. autosummary::
:nosignatures:
identify
identify_many
filter_processed
getFileObj
parse_date
================================================
FILE: docs/source/api/examine.rst
================================================
Examine
=======
.. automodule:: pyroSAR.examine
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
================================================
FILE: docs/source/api/figures/snap_geocode.graphml
================================================
S1 GRDThermalNoiseRemovalApply-Orbit-FileMultilookTerrain-FlatteningTerrain-CorrectionRESORB / POEORBDEMReadTOPSAR-DeburstS1 SLCThermalNoiseRemovalReadmulti-GRDFolder 1S1 GRD 2ThermalNoiseRemovalReadSliceAssemblyRemove-GRD-Border-NoiseCalibrationSubsetAOIscatteringAreaFolder 2BandMathsgammaSigmaRatioFolder 3BandMathsexport_extraFolder 4WritelocalIncidenceAngle...backscatterFolder 5LinearToFromdBWrite (2)sigma0/gamma0 elp/rtcCalibrationRemove-GRD-Border-NoiseCalibration
================================================
FILE: docs/source/api/figures/workflow_readme.txt
================================================
workflow files were created with the yED Graph Editor (https://www.yworks.com/products/yed)
setting the vector bridge style:
Preferences -> Display -> Bridge Style
================================================
FILE: docs/source/api/gamma/api.rst
================================================
.. _gamma-command-api:
GAMMA Command API
-----------------
This is an attempt to make it easier to execute GAMMA commands by offering automatically parsed Python functions.
Thus, instead of executing the command via shell:
.. code-block:: shell
offset_fit offs ccp off.par coffs - 0.15 3 0 > offset_fit.log
one can wrap it in a Python script:
.. code-block:: python
import os
from pyroSAR.gamma.api import isp
workdir = '/data/gamma_workdir'
parameters = {'offs': os.path.join(workdir, 'offs'),
'ccp': os.path.join(workdir, 'ccp'),
'OFF_par': os.path.join(workdir, 'off.par'),
'coffs': os.path.join(workdir, 'coffs'),
'thres': 0.15,
'npoly': 3,
'interact_flag': 0,
'logpath': workdir}
isp.offset_fit(**parameters)
A file `offset_fit.log` containing the output of the command is written in both cases. Any parameters that should
not be written and would need to be set to `-` in the shell can be omitted in the Python call, since all optional
parameters of the functions are already defined with '-' as a default.
The documentation can be called like with any Python function:
.. code-block:: python
from pyroSAR.gamma.api import isp
help(isp.offset_fit)
Parser Documentation
********************
.. automodule:: pyroSAR.gamma.parser
:members:
:undoc-members:
:show-inheritance:
API Demo
********
This is a demonstration of an output script as generated automatically by function
:func:`~pyroSAR.gamma.parser.parse_module` for the GAMMA module `ISP`.
Within each function, the command name and all parameters are passed to function
:func:`~pyroSAR.gamma.process`, which converts all input to :py:obj:`str` and then calls the command via the
:mod:`subprocess` module.
.. automodule:: pyroSAR.gamma.parser_demo
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/source/api/gamma/auxil.rst
================================================
Auxiliary functionality
-----------------------
.. automodule:: pyroSAR.gamma.auxil
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
do_execute
ISPPar
Namespace
par2hdr
process
slc_corners
Spacing
UTM
================================================
FILE: docs/source/api/gamma/dem.rst
================================================
DEM tools
---------
.. automodule:: pyroSAR.gamma.dem
:members: dem_autocreate, dem_import, dempar, fill, hgt, hgt_collect, makeSRTM, mosaic, swap
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
dem_autocreate
dem_import
dempar
fill
hgt
hgt_collect
makeSRTM
mosaic
swap
================================================
FILE: docs/source/api/gamma/error.rst
================================================
Error handling
--------------
.. automodule:: pyroSAR.gamma.error
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
gammaErrorHandler
GammaUnknownError
================================================
FILE: docs/source/api/gamma/figures/gamma_geocode.graphml
================================================
par_S1_GRDS1_OPOD_vecmulti_look_MLIgc_mappixel_areaproductgeocode_backlinear_to_dBS1 GRDDEMgamma0-rtc_geo_dBgeocode_backinc_geomlimli2deminclutlssig2gam_ratiogamma0-rtcgamma0-rtc_geoPOEORB
================================================
FILE: docs/source/api/gamma/index.rst
================================================
GAMMA
=====
.. toctree::
:maxdepth: 1
util
auxil
dem
api
error
================================================
FILE: docs/source/api/gamma/util.rst
================================================
Processing
----------
.. automodule:: pyroSAR.gamma.util
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
calibrate
convert2gamma
correctOSV
gc_map_wrap
geocode
lat_linear_to_db
lat_product
lat_ratio
multilook
ovs
pixel_area_wrap
S1_deburst
================================================
FILE: docs/source/api/sentinel-1.rst
================================================
Sentinel-1 Tools
================
.. automodule:: pyroSAR.S1
:members: OSV, removeGRDBorderNoise
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
OSV
removeGRDBorderNoise
================================================
FILE: docs/source/api/snap.rst
================================================
SNAP
====
Processing
----------
.. automodule:: pyroSAR.snap.util
:members:
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
geocode
noise_power
Workflow Parsing and Execution
------------------------------
.. automodule:: pyroSAR.snap.auxil
:members: gpt, execute, parse_node, parse_recipe, split, groupbyWorkers, Workflow, Node, Par, Par_BandMath, dem_parametrize, geo_parametrize, mli_parametrize, orb_parametrize, sub_parametrize
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
gpt
execute
parse_node
parse_recipe
split
groupbyWorkers
Workflow
Node
Par
Par_BandMath
dem_parametrize
geo_parametrize
mli_parametrize
orb_parametrize
sub_parametrize
General Utilities
-----------------
.. automodule:: pyroSAR.snap.auxil
:members: erode_edges, writer
:undoc-members:
:show-inheritance:
.. autosummary::
:nosignatures:
erode_edges
writer
================================================
FILE: docs/source/conf.py
================================================
"""Sphinx configuration for building the pyroSAR documentation.

This file is executed by Sphinx at build time; all settings are plain
module-level assignments read by Sphinx and its extensions.
"""
import sys
import os
import datetime
# read the installed distribution's version from package metadata
# (avoids importing pyroSAR itself during the docs build)
from importlib.metadata import version as get_version
project = 'pyroSAR'
authors = 'the pyroSAR Developers'
# current year, used in the copyright notice below
year = datetime.datetime.now().year
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# The full version, including alpha/beta/rc tags.
version_full = get_version(project)
# The short X.Y version.
version = '.'.join(version_full.split('.')[:2])
# release is automatically added to the latex document title and header
release = version
# modules mocked by autodoc so the documentation can be built
# without these (heavy or optional) dependencies installed
autodoc_mock_imports = ['osgeo', 'sqlalchemy', 'sqlalchemy_utils', 'geoalchemy2',
'lxml', 'progressbar', 'spatialist']
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.6'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.bibtex',
'sphinxcontrib.cairosvgconverter',
'sphinx_autodoc_typehints'
]
# BibTeX source file(s) for the sphinxcontrib-bibtex extension
bibtex_bibfiles = ['references.bib']
# autodoc_default_flags = ['members']
# empty list: no autosummary stub pages are generated automatically
autosummary_generate = []
# mapping of external projects for cross-referencing via intersphinx
intersphinx_mapping = {
'osgeo': ('https://gdal.org', None),
'python': ('https://docs.python.org/3', None),
'requests': ('https://requests.readthedocs.io/en/latest', None),
'scipy': ('https://docs.scipy.org/doc/scipy', None),
'spatialist': ('https://spatialist.readthedocs.io/en/latest', None),
'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest', None),
'sqlalchemy-utils': ('https://sqlalchemy-utils.readthedocs.io/en/latest', None)
}
# napoleon settings: docstrings follow the NumPy convention, not Google's
napoleon_google_docstring = False
napoleon_numpy_docstring = True
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = ' (c) 2014-{}, {}'.format(year, authors)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = '{}doc'.format(project)
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
'preamble': r'''
\setcounter{tocdepth}{2}
\setlength{\headheight}{27pt}
''',
# disable floating
'figure_align': 'H',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index',
'{}.tex'.format(project),
r'{} Documentation'.format(project),
authors, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index',
project,
'{} Documentation'.format(project),
[authors],
1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index',
project,
'{} Documentation'.format(project),
authors,
project,
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
================================================
FILE: docs/source/general/DEM.rst
================================================
###############
DEM Preparation
###############
SAR processing requires a high resolution Digital Elevation Model for ortho-rectification and normalization of
terrain-specific imaging effects.
In SNAP, the DEM is usually auto-downloaded by the software itself and the user only specifies the DEM source to be
used, e.g. SRTM. pyroSAR's convenience function :func:`pyroSAR.snap.util.geocode` can additionally pass SNAP's option to use an
external DEM file via parameters `externalDEMFile`, `externalDEMNoDataValue` and `externalDEMApplyEGM`.
GAMMA does not provide ways to automatically download DEMs for processing and the user thus also needs to provide an
external DEM file in GAMMA's own format. However, several commands are available to prepare these DEMs including
conversion from geoid heights to WGS84 ellipsoid heights.
pyroSAR offers several convenience functions to automatically prepare DEM mosaics from different
sources to use them in either SNAP or GAMMA.
Download of DEM Tiles
=====================
The function :func:`pyroSAR.auxdata.dem_autoload` offers convenient download of tiles from different sources
overlapping with user-defined geometries. Optionally, a buffer in degrees can be defined.
This function internally makes use of the function :func:`spatialist.auxil.gdalbuildvrt`.
.. code-block:: python
from pyroSAR.auxdata import dem_autoload
from spatialist import Vector
site = 'mysite.shp'
vrt = 'mosaic.vrt'
with Vector(site) as vec:
vrt = dem_autoload(geometries=[vec],
demType='SRTM 1Sec HGT',
vrt=vrt,
buffer=0.1)
The tiles, which are delivered in compressed archives, are directly connected to a virtual mosaic using GDAL's VRT
format, making it easier to work with them by treating them as a single file.
For downloading tiles of some DEM types, e.g. `TDX90m`, an account needs to be created and the user credentials be passed to
function :func:`~pyroSAR.auxdata.dem_autoload`. See the function's documentation for further details.
The files are stored in SNAP's location for auxiliary data, which per default is `$HOME/.snap/auxdata/dem`.
The function :func:`~pyroSAR.auxdata.dem_autoload` has proven beneficial in server environments where not every node has internet access and the tiles thus
need to be downloaded prior to processing on these nodes.
DEM Mosaicing
=============
In a next step we create a mosaic GeoTIFF cropped to the boundaries defined in the VRT using the function
:func:`pyroSAR.auxdata.dem_create`.
The spatial reference system, WGS84 UTM 32N in this case, is defined by its EPSG code but also several other options
are available. Since for SAR processing we are interested in ellipsoid heights, we call the function with the according
parameter `geoid_convert` set to `True`.
This function makes use of :func:`spatialist.auxil.gdalwarp`.
Conversion of vertical reference systems, e.g. from geoid to ellipsoid, requires GDAL version >=2.2.
.. code-block:: python
from pyroSAR.auxdata import dem_create
outname = 'mysite_srtm.tif'
dem_create(src=vrt, dst=outname,
t_srs=32632, tr=(20, 20),
resampling_method='bilinear',
geoid_convert=True, geoid='EGM96')
GAMMA Import
============
For convenience, pyroSAR's :mod:`~pyroSAR.gamma` submodule contains a function :func:`pyroSAR.gamma.dem.dem_autocreate`, which is a
combination of functions :func:`~pyroSAR.auxdata.dem_autoload` and :func:`~pyroSAR.auxdata.dem_create` and further
executes GAMMA commands for format conversion.
It offers the same parameters as these two functions and a user can additionally decide whether geoid-ellipsoid
conversion is done in GDAL or in GAMMA via parameter `geoid_mode`. The output is a file in GAMMA format, which can
directly be used for processing by e.g. function :func:`pyroSAR.gamma.geocode`.
================================================
FILE: docs/source/general/OSV.rst
================================================
####################################
Handling of Orbit State Vector Files
####################################
SAR products require additional orbit state vector (OSV) information to improve their spatial location accuracy.
This information is found in externally hosted files, which need to be downloaded separately and are then used by SAR
processing software to update the product's metadata. Currently, pyroSAR only supports handling of Sentinel-1 OSV files.
In SNAP, the corresponding processing node is called `Apply-Orbit-File`, which automatically downloads the OSV file and
updates the scene's metadata. The files are stored in SNAP's location for auxiliary data,
which per default is `$HOME/.snap/auxdata/Orbits`.
In GAMMA, on the other hand, the downloading has to be done manually after which the command `isp.S1_OPOD_vec` can be
used for updating the metadata. pyroSAR offers several approaches for automatically downloading these
files. The central tool for managing existing files and downloading new ones is the class :class:`pyroSAR.S1.OSV`, which
is used for all approaches.
.. note::
in the following a dedicated directory is defined into which the files will be downloaded. If this directory is
not defined (default is `None`), the files will be downloaded to SNAP's auxiliary data location (see above). This is
recommended as the files are kept in a central location that is accessible both by SNAP and by pyroSAR's GAMMA
functionality.
approach 1: direct download by time span
========================================
In case a large number of scenes is to be processed and/or no internet access is available during processing, the files
can be downloaded by time span to a central directory. This is the most basic approach using the central class
:class:`~pyroSAR.S1.OSV` mentioned above, making use of its methods :meth:`~pyroSAR.S1.OSV.catch` and
:meth:`~pyroSAR.S1.OSV.retrieve`.
.. code-block:: python
from pyroSAR.S1 import OSV
osvdir = '/path/to/osvdir'
with OSV(osvdir) as osv:
files = osv.catch(sensor='S1A', osvtype='POE',
start='20170101T000000', stop='20180101T000000',
url_option=1)
osv.retrieve(files)
Two sub-directories `POEORB` and `RESORB` will be created in `osvdir` containing the downloaded files. `POEORB` will
contain the `Precise Orbit Ephemerides` files, which are the most accurate but are first available about two weeks after
the scene's acquisition. `RESORB` describes the `Restituted Orbit` files, which are less accurate but available
directly after acquisition. See method :meth:`~pyroSAR.S1.OSV.catch` for download URL options.
approach 2: manual download per scene
=====================================
The method :meth:`pyroSAR.drivers.SAFE.getOSV` can be used to directly retrieve the files relevant for the scene.
This method internally uses the methods described above with a time span limited to that of the scene acquisition.
.. code-block:: python
from pyroSAR import identify
scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
id = identify(scene)
match = id.getOSV(osvdir='/path/to/osvdir', osvType='POE', returnMatch=True)
print(match)
approach 3: direct download and scene metadata update (GAMMA only)
==================================================================
The convenience function :func:`pyroSAR.gamma.correctOSV` internally makes use of approach 2 and additionally directly
executes the GAMMA command `isp.S1_OPOD_vec` for updating the scene's metadata with the information of the OSV file.
The scene has to be unpacked first (see :meth:`pyroSAR.drivers.SAFE.unpack`).
.. code-block:: python
from pyroSAR import identify
from pyroSAR.gamma import correctOSV
scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
id = identify(scene)
id.unpack('tmpdir')
correctOSV(id=id, osvdir='/path/to/osvdir', osvType='POE')
approach 4: automatic download and use during processing
========================================================
The processing function :func:`pyroSAR.gamma.geocode` automatically downloads OSV files needed for processing and
updates the scene's metadata using function :func:`~pyroSAR.gamma.correctOSV`.
It is thus the most convenient way to handle these files and related processing steps.
The parameter `allow_RES_OSV` can be used to allow processing with `RES` files if no `POE` file is available yet.
.. code-block:: python
from pyroSAR.gamma import geocode
scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
geocode(scene=scene,
dem='/path/to/demfile',
tmpdir='tmpdir',
outdir='outdir',
targetres=20,
osvdir='/path/to/osvdir',
allow_RES_OSV=False)
Similarly, the function :func:`pyroSAR.snap.util.geocode` also automatically downloads OSV files and chooses the best
matching OSV type for processing.
.. code-block:: python
from pyroSAR.snap import geocode
scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
geocode(infile=scene,
outdir='outdir',
allow_RES_OSV=True)
In contrast to the GAMMA function, the OSV download directory cannot be set because of the fixed SNAP auxiliary data
location. The type of the available OSV file is written to the workflow XML file for processing:
.. code-block:: xml
   <node id="Apply-Orbit-File">
     <parameters>
       <orbitType>Sentinel Restituted (Auto Download)</orbitType>
       <polyDegree>3</polyDegree>
       <continueOnFail>false</continueOnFail>
     </parameters>
   </node>
================================================
FILE: docs/source/general/configuration.rst
================================================
#############
Configuration
#############
pyroSAR stores configuration under `$HOME/.pyrosar`.
It contains a file `config.ini` which stores installation paths of SNAP and GAMMA.
The installations are first identified by running the respective `Examine*` class (e.g. :class:`~pyroSAR.examine.ExamineSnap`):
.. code-block:: python
from pyroSAR.examine import ExamineSnap
config = ExamineSnap()
SNAP configuration can also be modified with this class, either by the object properties `userpath` and `auxdatapath` or by the underlying :class:`~pyroSAR.examine.SnapProperties` object:
.. code-block:: python
config.userpath = '/path/to/snap/data'
config.snap_properties['snap.userdir'] = '/path/to/snap/data'
The values are directly written to either `snap.auxdata.properties` or `snap.properties` under `$HOME/.snap/etc`.
The content of these files will override that in the files found under `etc` in the SNAP installation folder.
Setting a parameter to `None` will comment out the value in the respective file.
================================================
FILE: docs/source/general/filenaming.rst
================================================
###########
File Naming
###########
pyroSAR internally uses a fixed naming scheme to keep track of processed results. For each scene an identifier is created,
which contains the sensor, acquisition mode, orbit (ascending or descending) and the time stamp of the acquisition start.
For example `S1A__IW___A_20150222T170750`, which is created by calling method :meth:`~pyroSAR.drivers.ID.outname_base`:
.. code-block:: python
from pyroSAR import identify
id = identify('S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip')
print(id.outname_base())
For each attribute a fixed number of digits is reserved. In case the attribute is shorter than this number,
the rest of the digits is filled with underscores. I.e., the sensor field is four digits long, but 'S1A' only three.
Thus, `S1A_` is the sensor slot. In the same way, `IW__` is the acquisition mode slot, which is also four digits long.
`A` denotes ascending orbit, the time stamp is in format YYYYmmddTHHMMSS.
Processing functions like :func:`~pyroSAR.gamma.geocode` add suffixes to this identifier to further keep track of
individual processing steps performed on the dataset.
This core concept is used by many pyroSAR functions internally to keep track of which scenes have been processed before.
================================================
FILE: docs/source/general/installation.rst
================================================
############
Installation
############
conda
=====
Starting with version 0.11, pyroSAR is distributed via `conda-forge <https://conda-forge.org>`_
and can easily be installed with
::
conda install --channel conda-forge pyrosar
This is by far the easiest way to work with pyroSAR on any operating system.
pip
===
Installation with pip is also supported and offers the advantage to install intermediate development stages directly
from the GitHub repository. Mind however that several dependencies like GDAL cannot fully be installed this way.
See further below for detailed Linux dependency installation instructions.
Installation of pip (Linux):
::
sudo apt-get install python-pip
The latest stable release of pyroSAR can then be installed:
::
python -m pip install pyroSAR
For installation of the latest master branch on GitHub, we need the version control system git. On Windows, git can be
downloaded from `git-scm.com <https://git-scm.com>`_. On Linux you can install it via command line:
::
sudo apt-get install git
Once everything is set up, pyroSAR is ready to be installed:
::
python -m pip install git+https://github.com/johntruckenbrodt/pyroSAR.git
Dependencies
============
The more specific instructions below are intended for Linux users who like to work outside of the Anaconda environment.
GDAL
----
pyroSAR requires GDAL version 2.1 with GEOS and PROJ4 as dependencies as well as the GDAL Python binding.
Ubuntu
++++++
Starting with release Yakkety (16.10), Ubuntu comes with GDAL >2.1.
You can install it like this:
::
sudo apt-get install python-gdal python3-gdal gdal-bin
For older Ubuntu releases you can add the ubuntugis repository to apt prior to installation to install version >2.1:
::
sudo add-apt-repository ppa:ubuntugis/ppa
sudo apt-get update
This way the required dependencies (GEOS and PROJ4 in particular) are also installed.
You can check the version by typing:
::
gdalinfo --version
Debian
++++++
Starting with Debian 9 (Stretch) GDAL is available in version >2.1 in the official repository.
Building from source
++++++++++++++++++++
Alternatively, you can build GDAL and the dependencies from source. The script `pyroSAR/install/install_deps.sh`
gives specific instructions on how to do it. It is not yet intended to run this script via shell, but rather to
follow the instructions step by step.
SQLite + SpatiaLite
-------------------
While `sqlite3` and its Python binding are usually already installed, the `spatialite` extension needs to be
added. Two packages exist, `libspatialite` and `mod_spatialite`. Both can be used by pyroSAR.
On Ubuntu, `mod_spatialite` has been found to be easier to setup with `sqlite` and can be installed via `apt`:
::
sudo apt-get install libsqlite3-mod-spatialite
On CentOS, `libspatialite` including shared objects for extension loading can be installed via `yum`:
::
sudo yum install libspatialite-devel
The following can be run in Python to test the needed functionality:
.. code-block:: python
import sqlite3
# setup an in-memory database
con=sqlite3.connect(':memory:')
# enable loading extensions and load spatialite
con.enable_load_extension(True)
try:
con.load_extension('mod_spatialite.so')
except sqlite3.OperationalError:
con.load_extension('libspatialite.so')
In case loading extensions is not permitted you might need to install the package `pysqlite2`
together with a static build of `sqlite3`. See the script `pyroSAR/install/install_deps.sh` for instructions.
There you can also find instructions on how to install `spatialite` from source.
To test `pysqlite2` you can import it as follows and then run the test above:
.. code-block:: python
from pysqlite2 import dbapi2 as sqlite3
Installing this package is likely to cause problems with the `sqlite3` library installed on the system.
Thus, it is safer to build a static `sqlite3` library for it (see installation script).
GAMMA
-----
GAMMA's home directory as environment variable 'GAMMA_HOME' is expected to end either as GAMMA_SOFTWARE- or GAMMA_SOFTWARE/.
If this differs in your install and cannot be changed, a workaround is adjusting the expected pattern in :class:`~pyroSAR.examine.ExamineGamma`.
================================================
FILE: docs/source/general/logging.rst
================================================
#######
Logging
#######
pyroSAR makes use of the :mod:`logging` module to display status messages for running processes.
See the `Logging HOWTO <https://docs.python.org/3/howto/logging.html>`_ for a basic tutorial.
To display log messages you may add one of the following examples to your script:
.. code-block:: python
import logging
# basic info
logging.basicConfig(level=logging.INFO)
# basic info with some message filtering
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
# detailed debug info
logging.basicConfig(level=logging.DEBUG)
================================================
FILE: docs/source/general/processing.rst
================================================
#################################
SAR Image Handling and Processing
#################################
Image Metadata
==============
Let's start working with our actual satellite data.
At first we load the scene into pyroSAR for analysis of the metadata:
.. code-block:: python
from pyroSAR import identify
name = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'
scene = identify(name)
print(scene)
This will automatically identify the scene, scan it for metadata and print a summary of selected metadata entries.
Several attribute names (e.g. `sensor` and `acquisition_mode`) are standardized for all SAR scenes.
Further entries, whose names are not standardized, can be found in a dictionary `scene.meta`.
The function :func:`~pyroSAR.drivers.identify` will loop through all SAR images classes (:mod:`pyroSAR.drivers`) and return an
object of the class that was successful in identifying the scene (:class:`~pyroSAR.drivers.SAFE` in this case).
.. _database-handling:
Database Handling
=================
Now that we have made ourselves familiar with the scene, we can import its metadata into an SQLite database using class
:class:`~pyroSAR.archive.Archive`:
.. code-block:: python
from pyroSAR import Archive
dbfile = 'scenes.db'
with Archive(dbfile) as archive:
archive.insert(scene)
`dbfile` is a file either containing an already existing database or one to be created.
In this case an SQLite database with SpatiaLite extension is created.
Alternatively, PostgreSQL + PostGIS can be used.
Let's assume our database contains a number of scenes and we want to select some for processing.
We have a shapefile, which contains a geometry delimiting our test site for which we want to
process some Sentinel-1 scenes.
We already processed some scenes in the past and the results are stored in a directory
`outdir`. We only want to select scenes which have not been processed to this directory before.
Furthermore, we are only interested in scenes acquired in Ground Range Detected (GRD) Interferometric Wide
Swath mode (IW), which contain a VV band.
.. code-block:: python
from spatialist import Vector
archive = Archive('scenes.db')
outdir = '/path/to/processed/results'
maxdate = '20171231T235959'
with Vector('site.shp') as site:
selection_proc = archive.select(vectorobject=site,
processdir=outdir,
maxdate=maxdate,
sensor=('S1A', 'S1B'),
product='GRD',
acquisition_mode='IW',
vv=1)
archive.close()
Here we use the vector geometry driver of package :doc:`spatialist `, which is developed alongside of pyroSAR.
The :class:`spatialist.Vector ` object is then passed to method
:meth:`Archive.select `.
.. _processing:
Processing
==========
The returned `selection_proc` is a list of file names for the scenes we selected from the database, which we can now
pass to a processing function:
.. code-block:: python
from pyroSAR.snap import geocode
# the target pixel spacing in meters
spacing = 20
for scene in selection_proc:
geocode(infile=scene, outdir=outdir, tr=spacing, scaling='db', shapefile=site)
The function :func:`snap.geocode ` is a basic utility for SNAP.
It will perform all necessary steps to subset, resample, topographically normalize, geocode and scale the input
image and write GeoTIFF files to the selected output directory.
All necessary files like orbit state vectors and SRTM DEM tiles are downloaded automatically in the background by SNAP.
SNAP is most conveniently used with workflow XMLs. The function geocode parses a workflow for the particular scene,
parametrizes it (depending on the scene type and selected processing parameters) and writes it to the output directory.
It then calls the command `gpt`, which is SNAP's command line interface, on the workflow to execute the processing steps.
================================================
FILE: docs/source/general/snap.rst
================================================
########
SNAP API
########
pyroSAR offers a collection of tools to parse SNAP XML workflows and execute them with SNAP's Graph Processing Tool
(`GPT `_). All functionality is
purely performed in Python and only the command line calls to GPT interact with SNAP. SNAP's Python API
`snappy `_ is not used
due to installation limitations and processing performance.
The following serves as a minimal example to showcase the core API functionality. A more complex example is given with
function :func:`pyroSAR.snap.util.geocode`.
.. code-block:: python
from pyroSAR.snap.auxil import parse_recipe, parse_node
workflow = parse_recipe('blank')
read = parse_node('Read')
read.parameters['file'] = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'
read.parameters['formatName'] = 'SENTINEL-1'
workflow.insert_node(read)
tnr = parse_node('ThermalNoiseRemoval')
workflow.insert_node(tnr, before=read.id)
bnr = parse_node('Remove-GRD-Border-Noise')
bnr.parameters['selectedPolarisations'] = ['VV']
workflow.insert_node(bnr, before=tnr.id)
write = parse_node('Write')
write.parameters['file'] = 'outname'
write.parameters['formatName'] = 'BEAM-DIMAP'
workflow.insert_node(write, before=bnr.id)
workflow.write('outname_proc')
Here, the function :func:`~pyroSAR.snap.auxil.parse_recipe` is first used to create an empty workflow object of type
:class:`~pyroSAR.snap.auxil.Workflow`.
Using the function :func:`~pyroSAR.snap.auxil.parse_node`, individual processing nodes can be loaded as
:class:`~pyroSAR.snap.auxil.Node` objects and parameterized using a :class:`~pyroSAR.snap.auxil.Par` object via
``.parameters``.
The method :meth:`~pyroSAR.snap.auxil.Workflow.insert_node` is then used to insert the nodes into the workflow including
linking of the nodes by modifying the source node entries. E.g. `Read` is set as source of the newly inserted
`Remove-GRD-Border-Noise` node. As a last step, the workflow is written to an XML file with method
:meth:`~pyroSAR.snap.auxil.Workflow.write`.
This XML file can then be passed to function :func:`~pyroSAR.snap.auxil.gpt` to process the workflow by internally
calling the GPT command line tool:
.. code-block:: python
from pyroSAR.snap.auxil import gpt
gpt('outname_proc.xml', tmpdir='.')
workflow splitting
==================
Simple workflows like the one shown above take only a few seconds to process, but the more processing nodes are added,
the more time it obviously takes to execute them. However, it was observed that executing long workflows takes longer
and consumes more memory than executing each node individually. pyroSAR offers functionality to split long workflows
into smaller groups and execute them in sequence with intermediate files being written in a temporary directory.
First, the workflow nodes are grouped to contain a defined number of processing nodes, i.e. everything but `Read` and
`Write`, using function :func:`~pyroSAR.snap.auxil.groupbyWorkers`:
.. code-block:: python
from pyroSAR.snap.auxil import groupbyWorkers
groupbyWorkers('outname_proc.xml', n=1)
This will return
.. code-block:: python
[['Read', 'ThermalNoiseRemoval'], ['Remove-GRD-Border-Noise', 'Write']]
These groups can directly be passed to function :func:`~pyroSAR.snap.auxil.gpt` via parameter ``groups``.
Internally the workflow is then split based on the groups and written to new XML files in a temporary directory using
function :func:`~pyroSAR.snap.auxil.split`. In this case, two workflows would be created:
- `Read` -> `ThermalNoiseRemoval` -> `Write`
- `Read` -> `Remove-GRD-Border-Noise` -> `Write`
These new files are then executed in sequence with intermediate `BEAM-DIMAP`
files written in the same directory as the sub-workflow XML files. After processing this directory is deleted unless
parameter ``cleanup`` of function :func:`~pyroSAR.snap.auxil.gpt` is set to ``False``.
backwards compatibility
=======================
With new versions of SNAP, new parameters are introduced and others removed. If a new parameter is not listed in the
node's XML description its default is used by SNAP during processing. If, however, a parameter is contained in the
workflow that is no longer supported by SNAP, the processing will be terminated. This can easily happen if the workflow
was created by an older version of SNAP. pyroSAR reads the error messages and, if an unknown parameter is mentioned,
deletes this parameter from the workflow, saves it to a new file and executes it instead.
troubleshooting
===============
SNAP as well as pyroSAR's SNAP API are constantly being developed and bugs are unfortunately inevitable.
This section is intended to guide users to better interpret errors and unexpected behaviour.
*The process is running but seems inactive without any progress.*
This might be related to SNAP's inability to download needed DEM tiles.
SNAP will be stuck in a loop infinitely trying to download the missing tiles.
This can be identified by directly running gpt in the command line.
However, by operating gpt through a Python subprocess, it is not possible to see those command line messages.
Only after a process has terminated, all messages can be retrieved and be written to log or error files.
A simple approach to interpret such a behaviour is to first create a workflow XML file with
:func:`~pyroSAR.snap.util.geocode`'s parameter ``test=True`` (so that only the XML is written but it is not executed):
.. code-block:: python
from pyroSAR.snap import geocode
geocode(scene='S1A_IW_GRDH_1SDV_20200720T023849_20200720T023914_033532_03E2B5_2952.zip',
outdir='/test', test=True)
and then run gpt on it directly in the shell (i.e. outside of Python):
::
gpt /test/S1A__IW___D_20200720T023849_VV_Orb_ML_TC_proc.xml
This way one can directly see gpt's status, which in this case might be
::
SEVERE: org.esa.snap.core.dataop.dem.ElevationFile: java.lang.reflect.InvocationTargetException
================================================
FILE: docs/source/index.rst
================================================
###################################
Welcome to pyroSAR's documentation!
###################################
General Topics
==============
.. toctree::
:maxdepth: 1
general/installation
general/filenaming
general/configuration
general/OSV
general/DEM
general/snap
general/processing
general/logging
API Documentation
=================
.. toctree::
:maxdepth: 1
api/drivers
api/archive
api/snap
api/gamma/index
api/sentinel-1
api/auxdata
api/datacube
api/ancillary
api/examine
About
=====
.. toctree::
:maxdepth: 1
about/projects
about/changelog
about/publications
about/references
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
================================================
FILE: docs/source/references.bib
================================================
% Encoding: UTF-8
@article{Ali2018,
author = {Ali, I. and Cao, S. and Naeimi, V. and Paulik, C. and Wagner, W.},
title = {Methods to Remove the Border Noise From Sentinel-1 Synthetic Aperture Radar Data: Implications and Importance For Time-Series Analysis},
journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
volume = {11},
number = {3},
pages = {777-786},
DOI = {10.1109/Jstars.2017.2787650},
year = {2018},
type = {Journal Article}
}
@techreport{Miranda2018,
author = {Miranda, N. and Hajduch, G.},
title = {Masking "No-value" Pixels on GRD Products generated by the Sentinel-1 ESA IPF},
institution = {CLS},
month = {29 January},
url = {https://sentinel.esa.int/documents/247904/2142675/Sentinel-1-masking-no-value-pixels-grd-products-note},
year = {2018},
type = {Report}
}
@article{Small2011,
author = {Small, D.},
title = {Flattening Gamma: Radiometric Terrain Correction for SAR Imagery},
journal = {IEEE Transactions on Geoscience and Remote Sensing},
volume = {49},
number = {8},
pages = {3081-3093},
DOI = {10.1109/Tgrs.2011.2120616},
year = {2011},
type = {Journal Article}
}
@inproceedings{Truckenbrodt2019,
author = {Truckenbrodt, J. and Cremer, F. and Baris, I. and Eberle, J.},
title = {pyroSAR: A Framework for Large-Scale SAR Satellite Data Processing},
booktitle = {Big Data from Space},
editor = {Soille, P. and Loekken, S. and Albani, S.},
address = {Luxembourg},
publisher = {Publications Office of the European Union},
pages = {197-200},
ISBN = {ISBN 978-92-76-00034-1},
DOI = {10.2760/848593},
year = {2019},
type = {Conference Proceedings}
}
@article{Truckenbrodt2019a,
author = {Truckenbrodt, J. and Freemantle, T. and Williams, C. and Jones, T. and Small, D. and Dubois, C. and Thiel, C. and Rossi, C. and Syriou, A. and Giuliani, G.},
title = {Towards Sentinel-1 SAR Analysis-Ready Data: A Best Practices Assessment on Preparing Backscatter Data for the Cube},
journal = {Data},
volume = {4},
number = {3},
ISSN = {2306-5729},
DOI = {10.3390/data4030093},
year = {2019},
type = {Journal Article}
}
@article{Visvalingam1993,
author = {Visvalingam, M. and Whyatt, J. D.},
title = {Line Generalization by Repeated Elimination of Points},
journal = {Cartographic Journal},
volume = {30},
number = {1},
pages = {46-51},
ISSN = {0008-7041},
DOI = {10.1179/caj.1993.30.1.46},
year = {1993},
type = {Journal Article}
}
================================================
FILE: environment-dev.yml
================================================
name: ps_test_dev
channels:
- conda-forge
dependencies:
- gdal>=2.4
- geoalchemy2<0.14.0
- libgdal
- libspatialite>=5.1.0
- lxml
- numpy
- packaging
- pillow
- progressbar2
- psycopg2
- python>=3.10
- pyyaml
- requests
- shapely
- spatialist>=0.17.0
- sqlalchemy>=1.4,<2.0
- sqlalchemy-utils>=0.37,<0.42
- coverage
- pytest
- flake8
================================================
FILE: environment-doc.yml
================================================
name: ps_doc
channels:
- conda-forge
dependencies:
- python>=3.10
- matplotlib
- numpy
- sphinx
- sphinx_rtd_theme
- sphinxcontrib-bibtex>=2.2
- sphinxcontrib-svg2pdfconverter
- cairosvg
- sphinx-autodoc-typehints
================================================
FILE: environment.yml
================================================
name: ps_test
channels:
- conda-forge
dependencies:
- gdal>=2.4
- geoalchemy2<0.14.0
- libgdal
- libspatialite>=5.1.0
- lxml
- numpy
- packaging
- pillow
- progressbar2
- psycopg2
- python>=3.10
- pyyaml
- requests
- shapely
- spatialist>=0.17.0
- sqlalchemy>=1.4,<2.0
- sqlalchemy-utils>=0.37,<0.42
================================================
FILE: pyproject.toml
================================================
[build-system]
requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2", "wheel"]
[project]
name = "pyroSAR"
description = "a framework for large-scale SAR satellite data processing"
requires-python = ">=3.10"
license = { file = "LICENSE.txt" }
maintainers = [
{ name = "John Truckenbrodt", email = "john.truckenbrodt@dlr.de" }
]
classifiers = [
"License :: OSI Approved :: MIT License",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3"
]
dynamic = ["version", "readme", "dependencies"]
[project.urls]
repository = "https://github.com/johntruckenbrodt/pyroSAR"
documentation = "https://pyrosar.readthedocs.io/en/latest/"
[project.optional-dependencies]
test = ["pytest"]
docs = [
"sphinx", "sphinx_rtd_theme", "sphinxcontrib-bibtex",
"sphinxcontrib-svg2pdfconverter", "cairosvg",
"sphinx-autodoc-typehints"
]
[tool.setuptools.dynamic]
dependencies = { file = ["requirements.txt"] }
readme = { file = ["README.md"], content-type = "text/markdown" }
[tool.setuptools_scm]
================================================
FILE: pyroSAR/ERS/__init__.py
================================================
from .auxil import passdb_create, passdb_query
from .mapping import get_resolution_nesz
================================================
FILE: pyroSAR/ERS/auxil.py
================================================
###############################################################################
# tools for processing ERS satellite data
# Copyright (c) 2014-2019, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
################################################################################
import os
import math
from spatialist import sqlite_setup
from spatialist.ancillary import HiddenPrints
from datetime import datetime, timedelta
import logging
log = logging.getLogger(__name__)
def passdb_create(ers1passes, ers2passes, dbname):
    """
    create a sqlite database from ERS pass tables
    downloaded from http://www.deos.tudelft.nl/ers/phases/starttimes.html.
    There you can also find additional information on the file structure and background.
    The fields `phase`, `cycle`, `pass`, `starttime` and `endtime` are read from the table.
    The latter two are converted to format YYYY-MM-DD HH:MM:SS.SSS.
    The fields `cycle` and `pass` are converted to integer.
    All five fields plus the name of the sensor (`ERS1` or `ERS2`) are then stored to the database.

    Parameters
    ----------
    ers1passes: str
        the name of the ERS-1 pass table
    ers2passes: str
        the name of the ERS-2 pass table
    dbname: str
        the name of the database to write the results to

    Returns
    -------
    """
    columns = {'satellite': 'TEXT',
               'phase': 'TEXT',
               'cycleNumber': 'INTEGER',
               'passNumber': 'INTEGER',
               'starttime': 'TEXT',
               'endtime': 'TEXT'}
    con = sqlite_setup(driver=dbname)
    try:
        create_string = '''CREATE TABLE if not exists data ({})'''.format(
            ', '.join([' '.join(x) for x in columns.items()]))
        cursor = con.cursor()
        cursor.execute(create_string)
        
        def time_convert(timestring):
            # table times are given as seconds since 1985-01-01
            dt = datetime(1985, 1, 1) + timedelta(seconds=float(timestring))
            return dt.strftime('%Y-%m-%d %H:%M:%S.%f')
        
        insert_string = '''INSERT INTO data({0}) VALUES({1})''' \
            .format(', '.join(columns.keys()),
                    ', '.join(['?'] * len(columns.keys())))
        for satellite, filename in [('ERS1', ers1passes), ('ERS2', ers2passes)]:
            with open(filename, 'r') as table:
                for line in table:
                    phase, cycle, passNumber, starttime, endtime = line.split()[0:5]
                    insertion = [satellite, phase,
                                 int(cycle), int(passNumber),
                                 time_convert(starttime), time_convert(endtime)]
                    # log each row at DEBUG level; previously every ERS-1 row
                    # (and only ERS-1) was logged at INFO, which was a
                    # debugging leftover flooding the log
                    log.debug(tuple(insertion))
                    cursor.execute(insert_string, tuple(insertion))
        con.commit()
    finally:
        # release the connection even if reading/parsing a table fails
        con.close()
def passdb_query(satellite, acqtime, dbname=None):
    """
    query the orbit information for an ERS acquisition

    Parameters
    ----------
    satellite: {'ERS1', 'ERS2'}
        the name of the satellite
    acqtime: datetime.datetime
        the acquisition time of the satellite image
    dbname: str, None
        the name of the database as created by :func:`passdb_create`. If None, the default database delivered with
        pyroSAR is used

    Returns
    -------
    dict
        the matched pass information with keys `satellite`, `phase`, `cycleNumber`,
        `passNumber` and `orbitNumber_rel`

    Raises
    ------
    ValueError
        if `satellite` is neither 'ERS1' nor 'ERS2'
    RuntimeError
        if no pass matching the acquisition time is found in the database
    """
    if satellite == 'ERS1':
        # the last timestamp for which specific ERS-1 orbit information is present,
        # afterwards that of ERS-2 is used
        last = datetime.strptime('1996-06-02 21:59:26.618659', '%Y-%m-%d %H:%M:%S.%f')
        sat = 'ERS2' if acqtime > last else 'ERS1'
    elif satellite == 'ERS2':
        sat = 'ERS2'
    else:
        raise ValueError("satellite must be either 'ERS1' or 'ERS2', was '{}'".format(satellite))
    if dbname is None:
        # default database shipped with pyroSAR
        dbname = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data', 'erspasses.db')
    with HiddenPrints():
        con = sqlite_setup(driver=dbname)
    cursor = con.cursor()
    acqtime_str = acqtime.strftime('%Y-%m-%d %H:%M:%S.%f')
    query = '''SELECT * FROM data WHERE satellite = ? AND starttime <= ? AND endtime >= ?'''
    cursor.execute(query, (sat, acqtime_str, acqtime_str))
    fetch = cursor.fetchall()
    if len(fetch) == 0 and sat != 'ERS2':
        # fall back to ERS-2 pass information if no ERS-1 match was found;
        # skipped when the first query already targeted ERS-2 (identical query)
        cursor.execute(query, ('ERS2', acqtime_str, acqtime_str))
        fetch = cursor.fetchall()
    if len(fetch) == 0:
        # previously an empty result raised an uninformative IndexError on fetch[0]
        raise RuntimeError('no pass information found for {} at {}'.format(satellite, acqtime_str))
    result = dict(zip(['satellite', 'phase', 'cycleNumber', 'passNumber'], fetch[0][0:4]))
    # report the satellite that was originally requested, even if ERS-2 data was used
    result['satellite'] = satellite
    # two passes (one ascending, one descending) per relative orbit
    result['orbitNumber_rel'] = int(math.ceil(result['passNumber'] / 2.))
    return result
================================================
FILE: pyroSAR/ERS/mapping.py
================================================
from typing import Literal
# Lookup table of acquisition characteristics per sensor / acquisition mode /
# swath ID, consumed by get_resolution_nesz():
#   res_rg      range resolution (presumably in meters -- TODO confirm units)
#   res_rg_new  updated range resolution for ASAR APP products acquired
#               after 2009-05-28 (see get_resolution_nesz)
#   res_az      azimuth resolution (presumably in meters -- TODO confirm units)
#   nesz_nr     near range noise equivalent sigma zero (NESZ)
#   nesz_fr     far range NESZ
#   std_dev     NOTE(review): meaning not evident from this file; not read by
#               get_resolution_nesz -- verify against other consumers
RESOLUTION_NESZ = {
    'ERS1': {
        'IMP': {
            'IS2': {
                'res_rg': 25.04,
                'res_az': 21.51,
                'nesz_nr': 26.8,
                'nesz_fr': 26
            },
            'std_dev': 20
        },
        'IMS': {
            'IS2': {
                'res_rg': 5.32,
                'res_az': 9.66,
                'nesz_nr': 26.8,
                'nesz_fr': 26
            },
            'std_dev': 20
        }
    },
    'ERS2': {
        'IMP': {
            'IS2': {
                'res_rg': 21.63,
                'res_az': 25.19,
                'nesz_nr': 23.1,
                'nesz_fr': 21.5
            },
            'std_dev': 20
        },
        'IMS': {
            'IS2': {
                'res_rg': 5.33,
                'res_az': 9.83,
                'nesz_nr': 23.1,
                'nesz_fr': 21.5
            },
            'std_dev': 20
        }
    },
    # ASAR covers all seven image swaths (IS1-IS7) plus the wide swath (WS)
    'ASAR': {
        'IMP': {
            'IS1': {
                'res_rg': 30.86,
                'res_az': 22.14,
                'nesz_nr': 25.1,
                'nesz_fr': 19.2
            },
            'IS2': {
                'res_rg': 24.90,
                'res_az': 22.14,
                'nesz_nr': 21.8,
                'nesz_fr': 20.5
            },
            'IS3': {
                'res_rg': 24.84,
                'res_az': 22.14,
                'nesz_nr': 22.6,
                'nesz_fr': 20.5
            },
            'IS4': {
                'res_rg': 25.56,
                'res_az': 22.14,
                'nesz_nr': 22.3,
                'nesz_fr': 19.1
            },
            'IS5': {
                'res_rg': 25.73,
                'res_az': 22.14,
                'nesz_nr': 21.4,
                'nesz_fr': 19
            },
            'IS6': {
                'res_rg': 26.15,
                'res_az': 22.14,
                'nesz_nr': 24,
                'nesz_fr': 21.2
            },
            'IS7': {
                'res_rg': 26.59,
                'res_az': 22.14,
                'nesz_nr': 23,
                'nesz_fr': 20.4
            },
            'std_dev': 5
        },
        'IMS': {
            'IS1': {
                'res_rg': 5.77,
                'res_az': 8.43,
                'nesz_nr': 25.1,
                'nesz_fr': 19.2
            },
            'IS2': {
                'res_rg': 5.77,
                'res_az': 8.43,
                'nesz_nr': 21.8,
                'nesz_fr': 20.5
            },
            'IS3': {
                'res_rg': 5.77,
                'res_az': 8.43,
                'nesz_nr': 22.6,
                'nesz_fr': 20.5
            },
            'IS4': {
                'res_rg': 5.77,
                'res_az': 8.43,
                'nesz_nr': 22.3,
                'nesz_fr': 19.1
            },
            'IS5': {
                'res_rg': 5.77,
                'res_az': 8.43,
                'nesz_nr': 21.4,
                'nesz_fr': 19
            },
            'IS6': {
                'res_rg': 5.77,
                'res_az': 8.43,
                'nesz_nr': 24,
                'nesz_fr': 21.2
            },
            'IS7': {
                'res_rg': 5.77,
                'res_az': 8.43,
                'nesz_nr': 23,
                'nesz_fr': 20.4
            },
            'std_dev': 5
        },
        # APP is the only mode carrying the alternative 'res_rg_new' value
        'APP': {
            'IS1': {
                'res_rg': 31.22,
                'res_rg_new': 31.22,
                'res_az': 27.45,
                'nesz_nr': 25.1,
                'nesz_fr': 19.2
            },
            'IS2': {
                'res_rg': 25.23,
                'res_rg_new': 24.10,
                'res_az': 27.45,
                'nesz_nr': 21.8,
                'nesz_fr': 20.5
            },
            'IS3': {
                'res_rg': 24.74,
                'res_rg_new': 24.30,
                'res_az': 27.45,
                'nesz_nr': 22.6,
                'nesz_fr': 20.5
            },
            'IS4': {
                'res_rg': 25.46,
                'res_rg_new': 25.30,
                'res_az': 27.45,
                'nesz_nr': 22.3,
                'nesz_fr': 19.1
            },
            'IS5': {
                'res_rg': 25.70,
                'res_rg_new': 25.35,
                'res_az': 27.45,
                'nesz_nr': 21.4,
                'nesz_fr': 19
            },
            'IS6': {
                'res_rg': 26.07,
                'res_rg_new': 25.90,
                'res_az': 27.45,
                'nesz_nr': 24,
                'nesz_fr': 21.2
            },
            'IS7': {
                'res_rg': 26.53,
                'res_rg_new': 26.32,
                'res_az': 27.45,
                'nesz_nr': 23,
                'nesz_fr': 20.4
            },
            'std_dev': 10
        },
        'APS': {
            'IS1': {
                'res_rg': 4.3,
                'res_az': 8.39,
                'nesz_nr': 25.1,
                'nesz_fr': 19.2
            },
            'IS2': {
                'res_rg': 4.3,
                'res_az': 8.39,
                'nesz_nr': 21.8,
                'nesz_fr': 20.5
            },
            'IS3': {
                'res_rg': 4.3,
                'res_az': 8.39,
                'nesz_nr': 22.6,
                'nesz_fr': 20.5
            },
            'IS4': {
                'res_rg': 4.3,
                'res_az': 8.39,
                'nesz_nr': 22.3,
                'nesz_fr': 19.1
            },
            'IS5': {
                'res_rg': 4.3,
                'res_az': 8.39,
                'nesz_nr': 21.4,
                'nesz_fr': 19
            },
            'IS6': {
                'res_rg': 4.3,
                'res_az': 8.39,
                'nesz_nr': 24,
                'nesz_fr': 21.2
            },
            'IS7': {
                'res_rg': 4.3,
                'res_az': 8.39,
                'nesz_nr': 23,
                'nesz_fr': 20.4
            },
            'std_dev': 10
        },
        'WSM': {
            'WS': {
                'res_rg': 150,
                'res_az': 150,
                'nesz_nr': 19.5,
                'nesz_fr': 23.5
            },
            'std_dev': 20
        },
        # no values available for WSS products
        'WSS': {
            'WS': {
                'res_rg': None,
                'res_az': None,
                'nesz_nr': None,
                'nesz_fr': None
            },
            'std_dev': None
        }
    }
}
def get_resolution_nesz(
        sensor: Literal['ERS1', 'ERS2', 'ASAR'],
        mode: Literal['APP', 'APS', 'IMP', 'IMS', 'WSM', 'WSS'],
        swath_id: Literal['IS1', 'IS2', 'IS3', 'IS4', 'IS5', 'IS6', 'IS7', 'WS'],
        date: str
) -> tuple[int | float | None, int | float | None, int | float | None, int | float | None]:
    """
    Look up acquisition characteristics that are not part of the product metadata:
    
    - range resolution
    - azimuth resolution
    - near range noise equivalent sigma zero (NESZ)
    - far range NESZ
    
    Parameters
    ----------
    sensor:
        the satellite sensor
    mode:
        the sensor acquisition mode
    swath_id:
        the sensor swath ID
    date:
        the acquisition date formatted as YYYYmmdd/YYYYmmddTHHMMSS
    
    Returns
    -------
    the attributes listed above
    """
    entry = RESOLUTION_NESZ[sensor][mode][swath_id]
    # ASAR APP products acquired after 2009-05-28 carry a dedicated range resolution value
    rg_key = 'res_rg_new' if mode == 'APP' and date > '20090528' else 'res_rg'
    return entry[rg_key], entry['res_az'], entry['nesz_nr'], entry['nesz_fr']
================================================
FILE: pyroSAR/S1/__init__.py
================================================
__author__ = 'john'
from .auxil import OSV, removeGRDBorderNoise
================================================
FILE: pyroSAR/S1/auxil.py
================================================
###############################################################################
# general utilities for Sentinel-1
# Copyright (c) 2016-2025, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
import os
import re
import sys
import requests
import tempfile
import zipfile as zf
from io import BytesIO
from datetime import datetime, timedelta
from dateutil import parser as dateutil_parser
from dateutil.relativedelta import relativedelta
import xml.etree.ElementTree as ET
import numpy as np
from osgeo import gdal
from osgeo.gdalconst import GA_Update
from . import linesimplify as ls
from pyroSAR.examine import ExamineSnap
from pyroSAR.ancillary import Lock
import progressbar as pb
from spatialist.ancillary import finder
import logging
log = logging.getLogger(__name__)
# Workaround for a failing 'import argparse': a stale byte-compiled
# 'locale.pyc' next to the entry script can shadow the stdlib 'locale'
# module; remove it and retry the import.
# NOTE(review): legacy workaround, presumably for old Python 2 deployments --
# verify it is still required.
try:
    import argparse
except ImportError:
    try:
        os.remove(os.path.join(os.path.dirname(sys.argv[0]), 'locale.pyc'))
    finally:
        # the import is always retried; if os.remove raised, that exception
        # still propagates after this finally block completes
        import argparse
def init_parser():
    """
    initialize argument parser for S1 processing utilities
    """
    parser = argparse.ArgumentParser()
    # boolean switches, all defaulting to False
    switches = [
        ('-t', '--transform', 'transform the final DEM to UTM coordinates'),
        ('-l', '--logfiles', 'create logfiles of the executed GAMMA commands'),
        ('-i', '--intermediates', 'keep intermediate files'),
        ('-q', '--quiet', 'suppress standard console prints'),
    ]
    for short_flag, long_flag, description in switches:
        parser.add_argument(short_flag, long_flag, action='store_true', help=description)
    # integer processing options
    parser.add_argument('-tr', '--targetresolution', default=20, type=int,
                        help='the target resolution in meters for x and y')
    parser.add_argument('-fg', '--func_geoback', default=2, type=int,
                        help='backward geocoding interpolation function; '
                             '0 - Nearest Neighbor, 1 - Bicubic Spline, 2 - Bicubic Spline-Log; '
                             'method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this')
    parser.add_argument('-fi', '--func_interp', default=0, type=int,
                        help='function for interpolation of layover/shadow/foreshortening/DEM gaps; '
                             '0 - set to 0, 1 - linear interpolation, 2 - actual value, 3 - nn-thinned')
    # orbit state vector directories
    parser.add_argument('-poe', '--poedir', default=None,
                        help='directory containing aux_poeorb (precise orbit ephemerides) orbit state vector files')
    parser.add_argument('-res', '--resdir', default=None,
                        help='directory containing aux_resorb (restituted orbit) orbit state vector files')
    # mandatory positional arguments
    positionals = [
        ('zipfile', 'S1 zipped scene archive to be used'),
        ('tempdir', 'temporary directory for intermediate files'),
        ('outdir', 'output directory'),
        ('srtmdir', 'directory containing SRTM hgt tiles (subdirectories possible)'),
    ]
    for name, description in positionals:
        parser.add_argument(name, help=description)
    return parser
# todo check existence not by file name but by start and stop time; files are sometimes re-published
class OSV(object):
    """
    interface for management of S1 Orbit State Vector (OSV) files
    
    input is a directory which is supposed to contain, or already contains, OSV files.
    Two subdirectories are expected and created otherwise:
    one for Precise Orbit Ephemerides (POE) named POEORB and one for Restituted Orbit (RES) files named RESORB
    
    Using method :meth:`match` the corresponding POE (priority) or RES file is returned for a timestamp.
    Timestamps are always handled in the format YYYYmmddTHHMMSS.
    
    Parameters
    ----------
    osvdir: str
        the directory to write the orbit files to
    timeout: int or tuple or None
        the timeout in seconds for downloading OSV files as provided to :func:`requests.get`
    
    See Also
    --------
    `requests timeouts <https://requests.readthedocs.io/en/latest/user/advanced/#timeouts>`_
    """
    
    def __init__(self, osvdir=None, timeout=300):
        self.timeout = timeout
        if osvdir is None:
            # fall back to the SNAP auxdata directory, or ~/.snap/auxdata
            # if SNAP cannot be examined
            try:
                auxdatapath = ExamineSnap().auxdatapath
            except AttributeError:
                auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')
            osvdir = os.path.join(auxdatapath, 'Orbits', 'Sentinel-1')
        self.outdir_poe = os.path.join(osvdir, 'POEORB')
        self.outdir_res = os.path.join(osvdir, 'RESORB')
        # coarse pattern for finding EOF file names in directory listings/HTML pages
        self.pattern = r'S1[ABCD]_OPER_AUX_(?:POE|RES)ORB_OPOD_[0-9TV_]{48}\.EOF'
        # fine pattern with named groups for extracting metadata from a file name;
        # group names are consumed by _parse/groupdict
        self.pattern_fine = r'(?P<sensor>S1[ABCD])_OPER_AUX_' \
                            r'(?P<type>(?:POE|RES)ORB)_OPOD_' \
                            r'(?P<publish>[0-9]{8}T[0-9]{6})_V' \
                            r'(?P<start>[0-9]{8}T[0-9]{6})_' \
                            r'(?P<stop>[0-9]{8}T[0-9]{6})\.EOF'
        self._init_dir()
        self._reorganize()
    
    def __enter__(self):
        return self
    
    def __exit__(self, exc_type, exc_val, exc_tb):
        return
    
    def _init_dir(self):
        """
        create directories if they don't exist yet
        """
        for dir in [self.outdir_poe, self.outdir_res]:
            if not os.path.isdir(dir):
                os.makedirs(dir)
    
    def _parse(self, file):
        """
        extract the metadata attributes from an OSV file name
        
        Parameters
        ----------
        file: str
            the OSV file name
        
        Returns
        -------
        dict
            the attributes 'sensor', 'type', 'publish', 'start' and 'stop'
        """
        basename = os.path.basename(file)
        groups = re.match(self.pattern_fine, basename).groupdict()
        return groups
    
    def _reorganize(self):
        """
        compress and move loose EOF files into their zipped subdirectory structure
        
        Returns
        -------
        """
        message = True
        for subdir in [self.outdir_poe, self.outdir_res]:
            if not os.path.isdir(subdir):
                continue
            files = finder(subdir, [self.pattern], recursive=False, regex=True)
            for eof in files:
                base = os.path.basename(eof)
                target = os.path.join(self._subdir(eof), base + '.zip')
                os.makedirs(os.path.dirname(target), exist_ok=True)
                if not os.path.isfile(target):
                    if message:
                        # log only once per run
                        log.info('compressing and reorganizing EOF files')
                        message = False
                    with zf.ZipFile(file=target,
                                    mode='w',
                                    compression=zf.ZIP_DEFLATED) as zip:
                        zip.write(filename=eof,
                                  arcname=base)
                os.remove(eof)
    
    def _typeEvaluate(self, osvtype):
        """
        evaluate the 'osvtype' method argument and return the corresponding local directory
        
        Parameters
        ----------
        osvtype: str
            the type of orbit files required; either 'POE' or 'RES'
        
        Returns
        -------
        str
            the local directory of the osv type
        """
        if osvtype not in ['POE', 'RES']:
            raise IOError('type must be either "POE" or "RES"')
        if osvtype == 'POE':
            return self.outdir_poe
        else:
            return self.outdir_res
    
    def __catch_aux_sentinel(self, sensor, start, stop, osvtype='POE'):
        # search the (legacy) aux.sentinel1.eo.esa.int server day by day
        url = 'http://aux.sentinel1.eo.esa.int'
        skeleton = '{url}/{osvtype}ORB/{year}/{month:02d}/{day:02d}/'
        files = []
        date_search = start
        busy = True
        while busy:
            url_sub = skeleton.format(url=url,
                                      osvtype=osvtype,
                                      year=date_search.year,
                                      month=date_search.month,
                                      day=date_search.day)
            response = requests.get(url_sub, timeout=self.timeout)
            response.raise_for_status()
            result = response.text
            files_sub = list(set(re.findall(self.pattern, result)))
            if len(files_sub) == 0:
                break
            for file in files_sub:
                match = re.match(self.pattern_fine, file)
                start2 = datetime.strptime(match.group('start'), '%Y%m%dT%H%M%S')
                stop2 = datetime.strptime(match.group('stop'), '%Y%m%dT%H%M%S')
                if sensor == match.group('sensor'):
                    # keep files whose validity period overlaps the search interval
                    if start2 < stop and stop2 > start:
                        log.info(url_sub)
                        files.append({'filename': file,
                                      'href': url_sub + '/' + file,
                                      'auth': None})
                if start2 >= stop:
                    busy = False
            date_search += timedelta(days=1)
        return files
    
    def __catch_step_auxdata(self, sensor, start, stop, osvtype='POE'):
        # search the ESA STEP auxdata server month by month per sensor
        url = 'https://step.esa.int/auxdata/orbits/Sentinel-1'
        skeleton = '{url}/{osvtype}ORB/{sensor}/{year}/{month:02d}/'
        if osvtype not in ['POE', 'RES']:
            raise RuntimeError("osvtype must be either 'POE' or 'RES'")
        if isinstance(sensor, str):
            sensor = [sensor]
        files = []
        date_search_final = datetime(year=stop.year, month=stop.month, day=1)
        for sens in sensor:
            # start one month early so files published before 'start' but
            # covering it are found as well
            date_search = datetime(year=start.year,
                                   month=start.month,
                                   day=1)
            date_search -= relativedelta(months=1)
            busy = True
            while busy:
                url_sub = skeleton.format(url=url,
                                          osvtype=osvtype,
                                          sensor=sens,
                                          year=date_search.year,
                                          month=date_search.month)
                log.info(url_sub)
                response = requests.get(url_sub, timeout=self.timeout)
                # a 404 simply means no files for that month; other errors are raised
                if response.status_code != 404:
                    response.raise_for_status()
                    result = response.text
                    files_sub = list(set(re.findall(self.pattern, result)))
                    for file in files_sub:
                        match = re.match(self.pattern_fine, file)
                        start2 = datetime.strptime(match.group('start'), '%Y%m%dT%H%M%S')
                        stop2 = datetime.strptime(match.group('stop'), '%Y%m%dT%H%M%S')
                        if start2 < stop and stop2 > start:
                            files.append({'filename': file,
                                          'href': url_sub + '/' + file + '.zip',
                                          'auth': None})
                if date_search == date_search_final:
                    busy = False
                date_search += relativedelta(months=1)
                if date_search > datetime.now():
                    busy = False
        return files
    
    def __catch_gnss(self, sensor, start, stop, osvtype='POE'):
        # search the (legacy) Copernicus GNSS hub via its OpenSearch API
        url = 'https://scihub.copernicus.eu/gnss'
        redirect = 'https://dhusfeed.dhus.onda-dias.net/gnss'
        auth = ('gnssguest', 'gnssguest')
        # a dictionary for storing the url arguments
        query = {}
        if osvtype == 'POE':
            query['producttype'] = 'AUX_POEORB'
        elif osvtype == 'RES':
            query['producttype'] = 'AUX_RESORB'
        else:
            raise RuntimeError("osvtype must be either 'POE' or 'RES'")
        if sensor in ['S1A', 'S1B', 'S1C', 'S1D']:
            query['platformname'] = 'Sentinel-1'
            # filename starts w/ sensor
            query['filename'] = '{}*'.format(sensor)
        elif sorted(sensor) == ['S1A', 'S1B', 'S1C', 'S1D']:
            query['platformname'] = 'Sentinel-1'
        else:
            raise RuntimeError('unsupported input for parameter sensor')
        # the collection of files to be returned
        collection = []
        date_start = start.strftime('%Y-%m-%dT%H:%M:%SZ')
        date_stop = stop.strftime('%Y-%m-%dT%H:%M:%SZ')
        # append the time frame to the query dictionary
        query['beginPosition'] = '[{} TO {}]'.format(date_start, date_stop)
        query['endPosition'] = '[{} TO {}]'.format(date_start, date_stop)
        query_list = []
        for keyword, value in query.items():
            query_elem = '{}:{}'.format(keyword, value)
            query_list.append(query_elem)
        query_str = ' '.join(query_list)
        target = '{}/search?q={}&format=json'.format(url, query_str)
        log.info(target)
        
        def _parse_gnsssearch_json(search_dict):
            parsed_dict = {}
            # Will return ['entry'] as dict if only one item
            # If so just make a list
            if isinstance(search_dict, dict):
                search_dict = [search_dict]
            for entry in search_dict:
                id = entry['id']
                entry_dict = {}
                for key, value in entry.items():
                    if key == 'title':
                        entry_dict[key] = value
                    elif key == 'id':
                        entry_dict[key] = value
                    elif key == 'ondemand':
                        if value.lower() == 'true':
                            entry_dict[key] = True
                        else:
                            entry_dict[key] = False
                    elif key == 'str':
                        for elem in value:
                            entry_dict[elem['name']] = elem['content']
                    elif key == 'link':
                        for elem in value:
                            if 'rel' in elem.keys():
                                href_key = 'href_' + elem['rel']
                                entry_dict[href_key] = elem['href']
                            else:
                                entry_dict['href'] = elem['href']
                    elif key == 'date':
                        for elem in value:
                            entry_dict[elem['name']] = dateutil_parser.parse(elem['content'])
                parsed_dict[id] = entry_dict
            return parsed_dict
        
        def _parse_gnsssearch_response(response_json):
            if 'entry' in response_json.keys():
                search_dict = response_json['entry']
                parsed_dict = _parse_gnsssearch_json(search_dict)
            else:
                parsed_dict = {}
            return parsed_dict
        
        response = requests.get(target, auth=auth, timeout=self.timeout)
        response.raise_for_status()
        response_json = response.json()['feed']
        total_results = response_json['opensearch:totalResults']
        subquery = [link['href'] for link in response_json['link'] if link['rel'] == 'self'][0]
        subquery = subquery.replace(redirect, url.strip())
        if int(total_results) > 10:
            # raise the page size to reduce the number of requests
            subquery = subquery.replace('rows=10', 'rows=100')
        while subquery:
            subquery_response = requests.get(subquery, auth=auth, timeout=self.timeout)
            subquery_response.raise_for_status()
            subquery_json = subquery_response.json()['feed']
            subquery_products = _parse_gnsssearch_response(subquery_json)
            items = list(subquery_products.values())
            for item in items:
                item['auth'] = auth
            collection += list(subquery_products.values())
            # follow pagination links until exhausted
            if 'next' in [link['rel'] for link in subquery_json['link']]:
                subquery = [link['href'] for link in subquery_json['link'] if link['rel'] == 'next'][0]
                subquery = subquery.replace(redirect, url.strip())
            else:
                subquery = None
        if osvtype == 'RES' and self.maxdate('POE', 'stop') is not None:
            # drop RES files whose period is already covered by local POE files
            collection = [x for x in collection
                          if self.date(x['filename'], 'start') > self.maxdate('POE', 'stop')]
        for item in collection:
            item['href'] = item['href'].replace(redirect, url)
        return collection
    
    def catch(self, sensor, osvtype='POE', start=None, stop=None, url_option=1):
        """
        check a server for files
        
        Parameters
        ----------
        sensor: str or list[str]
            The S1 mission(s):
            
            - 'S1A'
            - 'S1B'
            - 'S1C'
            - 'S1D'
        osvtype: str
            the type of orbit files required
        start: str or None
            the date to start searching for files in format YYYYmmddTHHMMSS
        stop: str or None
            the date to stop searching for files in format YYYYmmddTHHMMSS
        url_option: int
            the OSV download URL option
            
            - 1: https://step.esa.int/auxdata/orbits/Sentinel-1
        
        Returns
        -------
        list[dict]
            the product dictionary of the remote OSV files, with href
        """
        log.info('searching for new {} files'.format(osvtype))
        if start is not None:
            start = datetime.strptime(start, '%Y%m%dT%H%M%S')
        else:
            # default search start; presumably the earliest relevant
            # Sentinel-1 date -- confirm
            start = datetime.strptime('2014-07-31', '%Y-%m-%d')
        # set the defined date or the current date otherwise
        if stop is not None:
            stop = datetime.strptime(stop, '%Y%m%dT%H%M%S')
        else:
            stop = datetime.now()
        if url_option == 1:
            items = self.__catch_step_auxdata(sensor, start, stop, osvtype)
        else:
            raise ValueError("unknown URL option")
        if osvtype == 'RES' and self.maxdate('POE', 'stop') is not None:
            # drop RES files whose period is already covered by local POE files
            items = [x for x in items
                     if self.date(x['filename'], 'start') > self.maxdate('POE', 'stop')]
        log.info('found {} results'.format(len(items)))
        return items
    
    def date(self, file, datetype):
        """
        extract a date from an OSV file name
        
        Parameters
        ----------
        file: str
            the OSV file
        datetype: {'publish', 'start', 'stop'}
            one of three possible date types contained in the OSV filename
        
        Returns
        -------
        str
            a time stamp in the format YYYYmmddTHHMMSS
        """
        return self._parse(file)[datetype]
    
    def clean_res(self):
        """
        delete all RES files for whose date a POE file exists
        """
        maxdate_poe = self.maxdate('POE', 'stop')
        if maxdate_poe is not None:
            deprecated = [x for x in self.getLocals('RES') if self.date(x, 'stop') < maxdate_poe]
            log.info('deleting {} RES file{}'.format(len(deprecated), '' if len(deprecated) == 1 else 's'))
            for item in deprecated:
                os.remove(item)
    
    def getLocals(self, osvtype='POE'):
        """
        get a list of local files of specific type
        
        Parameters
        ----------
        osvtype: {'POE', 'RES'}
            the type of orbit files required
        
        Returns
        -------
        list[str]
            a selection of local OSV files
        """
        directory = self._typeEvaluate(osvtype)
        return finder(directory, [self.pattern], regex=True)
    
    def maxdate(self, osvtype='POE', datetype='stop'):
        """
        return the latest date of locally existing POE/RES files
        
        Parameters
        ----------
        osvtype: {'POE', 'RES'}
            the type of orbit files required
        datetype: {'publish', 'start', 'stop'}
            one of three possible date types contained in the OSV filename
        
        Returns
        -------
        str
            a timestamp in format YYYYmmddTHHMMSS
        """
        directory = self._typeEvaluate(osvtype)
        files = finder(directory, [self.pattern], regex=True)
        return max([self.date(x, datetype) for x in files]) if len(files) > 0 else None
    
    def mindate(self, osvtype='POE', datetype='start'):
        """
        return the earliest date of locally existing POE/RES files
        
        Parameters
        ----------
        osvtype: {'POE', 'RES'}
            the type of orbit files required
        datetype: {'publish', 'start', 'stop'}
            one of three possible date types contained in the OSV filename
        
        Returns
        -------
        str
            a timestamp in format YYYYmmddTHHMMSS
        """
        directory = self._typeEvaluate(osvtype)
        files = finder(directory, [self.pattern], regex=True)
        return min([self.date(x, datetype) for x in files]) if len(files) > 0 else None
    
    def match(self, sensor, timestamp, osvtype='POE'):
        """
        return the corresponding OSV file for the provided sensor and time stamp.
        The file returned is one which covers the acquisition time and, if multiple exist,
        the one which was published last.
        In case a list of options is provided as osvtype, the file of higher accuracy (i.e. POE over RES) is returned.
        
        Parameters
        ----------
        sensor: str
            The S1 mission:
            
            - 'S1A'
            - 'S1B'
            - 'S1C'
            - 'S1D'
        timestamp: str
            the time stamp in the format 'YYYmmddTHHMMSS'
        osvtype: str or list[str]
            the type of orbit files required; either 'POE', 'RES' or a list of both
        
        Returns
        -------
        str
            the best matching orbit file (overlapping time plus latest publication date)
        """
        # list all locally existing files of the defined type
        if osvtype in ['POE', 'RES']:
            locals = self.getLocals(osvtype)
            # filter the files to those which contain data for the defined time stamp
            files = [x for x in locals if self.date(x, 'start') <= timestamp <= self.date(x, 'stop')]
            files = [x for x in files if os.path.basename(x).startswith(sensor)]
            if len(files) > 0:
                # select the file which was published last
                # (a previous 'elif len(files) == 1' branch was unreachable and has been removed)
                best = self.sortByDate(files, 'publish')[-1]
                return best
            return None
        elif sorted(osvtype) == ['POE', 'RES']:
            best = self.match(sensor=sensor, timestamp=timestamp, osvtype='POE')
            if not best:
                best = self.match(sensor=sensor, timestamp=timestamp, osvtype='RES')
            return best
    
    def retrieve(self, products, pbar=False):
        """
        download a list of product dictionaries into the respective subdirectories, i.e. POEORB or RESORB
        
        Parameters
        ----------
        products: list[dict]
            a list of remotely existing OSV product dictionaries as returned by method :meth:`catch`
        pbar: bool
            add a progressbar?
        
        Returns
        -------
        """
        downloads = []
        for product in products:
            # both keys are mandatory; previously this only raised if both were missing
            if any(key not in product.keys() for key in ['filename', 'href']):
                raise RuntimeError("product dictionaries must contain 'filename' and 'href' keys")
            basename = product['filename']
            remote = product['href']
            auth = product['auth']
            outdir = self._subdir(basename)
            os.makedirs(outdir, exist_ok=True)
            local = os.path.join(outdir, basename) + '.zip'
            if not os.path.isfile(local):
                downloads.append((remote, local, basename, auth))
        if len(downloads) == 0:
            return
        log.info('downloading {} file{}'.format(len(downloads), '' if len(downloads) == 1 else 's'))
        if pbar:
            progress = pb.ProgressBar(max_value=len(downloads))
        else:
            progress = None
        i = 0
        for remote, local, basename, auth in downloads:
            # lock the target file so concurrent processes do not download it twice
            with Lock(local):
                if not os.path.isfile(local):
                    response = requests.get(remote, auth=auth, timeout=self.timeout)
                    response.raise_for_status()
                    infile = response.content
                    try:
                        if remote.endswith('.zip'):
                            # repackage the remote zip so the local archive
                            # contains only the EOF file under its plain name
                            with zf.ZipFile(file=BytesIO(infile)) as tmp:
                                members = tmp.namelist()
                                target = [x for x in members if re.search(basename, x)][0]
                                with zf.ZipFile(local, 'w') as outfile:
                                    outfile.writestr(data=tmp.read(target),
                                                     zinfo_or_arcname=basename)
                        else:
                            # compress the plain EOF content into a local zip file
                            with zf.ZipFile(file=local,
                                            mode='w',
                                            compression=zf.ZIP_DEFLATED) as outfile:
                                outfile.writestr(zinfo_or_arcname=basename,
                                                 data=infile)
                    except Exception:
                        # remove the incomplete local file before propagating the error
                        os.remove(local)
                        raise
            if pbar:
                i += 1
                progress.update(i)
        if pbar:
            progress.finish()
        self.clean_res()
    
    def sortByDate(self, files, datetype='start'):
        """
        sort a list of OSV files by a specific date type
        
        Parameters
        ----------
        files: list[str]
            some OSV files
        datetype: {'publish', 'start', 'stop'}
            one of three possible date types contained in the OSV filename
        
        Returns
        -------
        list[str]
            the input OSV files sorted by the defined date
        """
        return sorted(files, key=lambda x: self.date(x, datetype))
    
    def _subdir(self, file):
        """
        | return the subdirectory in which to store the EOF file,
        | i.e. basedir/{type}ORB/{sensor}/{year}/{month}
        | e.g. basedir/POEORB/S1A/2018/12
        
        Parameters
        ----------
        file: str
            the EOF filename
        
        Returns
        -------
        str
            the target directory
        """
        attr = self._parse(file)
        # 'type' is e.g. 'POEORB'; its first three characters select the base directory
        outdir = self._typeEvaluate(attr['type'][:3])
        start = self.date(file, datetype='start')
        start = datetime.strptime(start, '%Y%m%dT%H%M%S')
        month = '{:02d}'.format(start.month)
        outdir = os.path.join(outdir, attr['sensor'],
                              str(start.year), month)
        return outdir
def removeGRDBorderNoise(scene, method='pyroSAR'):
    """
    Mask out Sentinel-1 image border noise. This function implements the method for removing GRD border noise as
    published by ESA :cite:`Miranda2018` and implemented in SNAP and additionally adds further refinement of the result using an image
    border line simplification approach. In this approach the border between valid and invalid pixels is first
    simplified using the poly-line vertex reduction method by Visvalingam and Whyatt :cite:`Visvalingam1993`.
    The line segments of the new border are then shifted until all pixels considered invalid before the simplification
    are again on one side of the line. See image below for further clarification.
    
    Parameters
    ----------
    scene: pyroSAR.drivers.SAFE
        the Sentinel-1 scene object
    method: str
        the border noise removal method to be applied; one of the following:
        
        - 'ESA': the pure implementation as described by ESA
        - 'pyroSAR': the ESA method plus the custom pyroSAR refinement
    
    .. figure:: figures/S1_bnr.png
        :scale: 30%
        
        Demonstration of the border noise removal for a vertical left image border. The area under the respective lines
        covers pixels considered valid, everything above will be masked out. The blue line is the result of the noise
        removal as recommended by ESA, in which a lot of noise is still present. The red line is the over-simplified
        result using the Visvalingam-Whyatt method. The green line is the final result after further correcting the
        VW-simplified result.
    """
    if scene.product != 'GRD':
        raise RuntimeError('this method is intended for GRD only')
    if scene.compression is not None:
        raise RuntimeError('scene is not yet unpacked')
    if method not in ['pyroSAR', 'ESA']:
        raise AttributeError("parameter 'method' must be either 'pyroSAR' or 'ESA'")
    blocksize = 2000
    # compute noise scaling factor
    if scene.meta['IPF_version'] >= 2.9:
        log.info('border noise removal not necessary for IPF version {}'.format(scene.meta['IPF_version']))
        return
    elif scene.meta['IPF_version'] <= 2.5:
        knoise = {'IW': 75088.7, 'EW': 56065.87}[scene.acquisition_mode]
        cads = scene.getFileObj(scene.findfiles('calibration-s1[ab]-[ie]w-grd-(?:hh|vv)')[0])
        caltree = ET.fromstring(cads.read())
        cads.close()
        adn = float(caltree.find('.//calibrationVector/dn').text.split()[0])
        if scene.meta['IPF_version'] < 2.34:
            scalingFactor = knoise * adn
        else:
            scalingFactor = knoise * adn * adn
    else:
        scalingFactor = 1
    # read noise vectors from corresponding annotation xml
    noisefile = scene.getFileObj(scene.findfiles('noise-s1[ab]-[ie]w-grd-(?:hh|vv)')[0])
    noisetree = ET.fromstring(noisefile.read())
    noisefile.close()
    noiseVectors = noisetree.findall('.//noiseVector')
    # define boundaries of image subsets to be masked (4x the first lines/samples of the image boundaries)
    subsets = [(0, 0, blocksize, scene.lines),
               (0, 0, scene.samples, blocksize),
               (scene.samples - blocksize, 0, scene.samples, scene.lines),
               (0, scene.lines - blocksize, scene.samples, scene.lines)]
    # extract column indices of noise vectors
    yi = np.array([int(x.find('line').text) for x in noiseVectors])
    # create links to the tif files for a master co-polarization and all other polarizations as slaves
    master = scene.findfiles('s1.*(?:vv|hh).*tiff')[0]
    ras_master = gdal.Open(master, GA_Update)
    ras_slaves = [gdal.Open(x, GA_Update) for x in scene.findfiles('s1.*tiff') if x != master]
    outband_master = ras_master.GetRasterBand(1)
    outband_slaves = [x.GetRasterBand(1) for x in ras_slaves]
    # iterate over the four image subsets
    for subset in subsets:
        log.info(subset)
        xmin, ymin, xmax, ymax = subset
        xdiff = xmax - xmin
        ydiff = ymax - ymin
        # linear interpolation of noise vectors to array
        noise_interp = np.empty((ydiff, xdiff), dtype=float)
        for i in range(0, len(noiseVectors)):
            if ymin <= yi[i] <= ymax:
                # extract row indices of noise vector
                xi = [int(x) for x in noiseVectors[i].find('pixel').text.split()]
                # extract noise values
                noise = [float(x) for x in noiseVectors[i].find('noiseLut').text.split()]
                # interpolate values along rows
                noise_interp[yi[i] - ymin, :] = np.interp(range(0, xdiff), xi, noise)
        for i in range(0, xdiff):
            yi_t = yi[(ymin <= yi) & (yi <= ymax)] - ymin
            # interpolate values along columns
            noise_interp[:, i] = np.interp(range(0, ydiff), yi_t, noise_interp[:, i][yi_t])
        # read subset of image to array and subtract interpolated noise (denoising)
        mat_master = outband_master.ReadAsArray(xmin, ymin, xdiff, ydiff)
        denoisedBlock = mat_master.astype(float) ** 2 - noise_interp * scalingFactor
        # mask out all pixels with a value below 0.5 in the denoised block or 30 in the original block
        denoisedBlock[(denoisedBlock < 0.5) | (mat_master < 30)] = 0
        denoisedBlock = np.sqrt(denoisedBlock)
        if method == 'pyroSAR':
            # helper functions for finding the first valid pixel from either end of a line
            def helper1(x):
                return len(x) - np.argmax(x > 0)
            
            def helper2(x):
                return len(x) - np.argmax(x[::-1] > 0)
            
            # mask out negative values and simplify borders (custom implementation)
            if subset == (0, 0, blocksize, scene.lines):
                border = np.apply_along_axis(helper1, 1, denoisedBlock)
                border = blocksize - ls.reduce(border)
                for j in range(0, ydiff):
                    denoisedBlock[j, :border[j]] = 0
                    denoisedBlock[j, border[j]:] = 1
            elif subset == (0, scene.lines - blocksize, scene.samples, scene.lines):
                border = np.apply_along_axis(helper2, 0, denoisedBlock)
                border = ls.reduce(border)
                for j in range(0, xdiff):
                    denoisedBlock[border[j]:, j] = 0
                    denoisedBlock[:border[j], j] = 1
            elif subset == (scene.samples - blocksize, 0, scene.samples, scene.lines):
                border = np.apply_along_axis(helper2, 1, denoisedBlock)
                border = ls.reduce(border)
                for j in range(0, ydiff):
                    denoisedBlock[j, border[j]:] = 0
                    denoisedBlock[j, :border[j]] = 1
            elif subset == (0, 0, scene.samples, blocksize):
                border = np.apply_along_axis(helper1, 0, denoisedBlock)
                border = blocksize - ls.reduce(border)
                for j in range(0, xdiff):
                    denoisedBlock[:border[j], j] = 0
                    denoisedBlock[border[j]:, j] = 1
        mat_master[denoisedBlock == 0] = 0
        # write modified array back to original file
        outband_master.WriteArray(mat_master, xmin, ymin)
        outband_master.FlushCache()
        # perform reading, masking and writing for all other polarizations
        for outband in outband_slaves:
            mat = outband.ReadAsArray(xmin, ymin, xdiff, ydiff)
            mat[denoisedBlock == 0] = 0
            outband.WriteArray(mat, xmin, ymin)
            outband.FlushCache()
    # detach file links so GDAL flushes and closes the datasets;
    # note: the previous per-item loops ('for outband in outband_slaves:
    # outband = None') only rebound the loop variable and released nothing
    outband_master = None
    ras_master = None
    outband_slaves = None
    ras_slaves = None
================================================
FILE: pyroSAR/S1/linesimplify.py
================================================
###############################################################################
# Utilities for simplification of lines used by pyroSAR for border noise removal
# Copyright (c) 2017-2020, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
from osgeo import ogr
import numpy as np
from spatialist.ancillary import rescale
from .polysimplify import VWSimplifier
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
matplotlib.rcParams['font.size'] = 12
def simplify(x, y, maxpoints=20):
    """
    simplify a 2D line with the Visvalingam-Whyatt algorithm; the number of
    retained points is chosen from the squared deviation of each candidate
    simplification from the original line
    
    Parameters
    ----------
    x: iterable
        the x coordinates of the line
    y: iterable
        the y coordinates of the line
    maxpoints: int
        the maximum number of points to retain
    
    Returns
    -------
    the simplified points as returned by :meth:`VWSimplifier.from_number`
    """
    xf = list(map(float, x))
    yf = list(map(float, y))
    vw = VWSimplifier(np.array(list(zip(xf, yf))))
    # squared deviation of each candidate simplification from the original line
    residuals = []
    for npts in range(2, maxpoints + 1):
        candidate = vw.from_number(npts)
        cx, cy = zip(*candidate)
        residuals.append(np.sum((np.asarray(yf) - np.interp(xf, cx, cy)) ** 2))
    if min(residuals) == max(residuals):
        # all candidates deviate equally; return the coarsest simplification
        return vw.from_number(2)
    scaled = rescale(residuals)
    # pick the first candidate whose rescaled residual drops below 1 %
    best = np.argmax(np.asarray(scaled) < 0.01) + 2
    return vw.from_number(best)
def createPoly(xn, yn, xmax, ymax, plot=False):
    """
    create an OGR polygon geometry from a sequence of indices

    The ring is spanned by (0, 0), the passed points (excluding the fixed
    corners (0, 0) and (xmax, ymax) themselves), (xmax, ymax) and (xmax, 0).

    Parameters
    ----------
    xn: numpy.ndarray
        the x indices of the points
    yn: numpy.ndarray
        the y indices of the points
    xmax: int or float
        the maximum x index value
    ymax: int or float
        the maximum y index value
    plot: bool
        plot the resulting polygon?

    Returns
    -------
    osgeo.ogr.Geometry
        the polygon geometry
    """
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint_2D(0, 0)
    for item in zip(xn, yn):
        item = list(map(int, item))
        # skip the fixed corner points, which are added explicitly
        if item != [0, 0] and item != [xmax, ymax]:
            ring.AddPoint_2D(item[0], item[1])
    ring.AddPoint_2D(xmax, ymax)
    ring.AddPoint_2D(xmax, 0)
    ring.CloseRings()
    poly = ogr.Geometry(ogr.wkbPolygon)
    poly.AddGeometry(ring)
    if plot:
        fig, ax = plt.subplots()
        pts = ring.GetPoints()
        arr = np.array(pts)
        # pass 'closed' as keyword; positional usage is deprecated in newer matplotlib
        polygon = Polygon(arr, closed=True)
        p = PatchCollection([polygon], cmap=matplotlib.cm.jet, alpha=0.4)
        ax.add_collection(p)
        ax.autoscale_view()
        plt.scatter(arr[:, 0], arr[:, 1], s=10, color='red')
        plt.show()
    return poly
def reduce(seq, maxpoints=20, straighten=False, plot=False):
    """
    reduce the complexity of a line; the following steps are performed:

    - simplify the line using the Visvalingam-Whyatt method
    - iteratively add points on the original line back to the simplified line
      until the polygon spanned by the simplified line and (xmin, ymin) does not
      contain any further points of the original line; the polygon area is
      expected to only cover valid pixels of the image
    - optionally further straighten the result for smoother edges

    Parameters
    ----------
    seq: numpy.ndarray
        the 1D line sequence to be simplified
    maxpoints: int
        the maximum number of points in the simplified sequence
    straighten: bool
        perform additional straightening on the simplified line?
    plot: bool
        plot the results?

    Returns
    -------
    numpy.ndarray
        the simplified line sequence
    """
    # a constant sequence cannot be simplified any further
    if min(seq) == max(seq):
        return np.array(seq)
    x = list(range(0, len(seq)))
    if plot:
        plt.plot(seq, label='ESA-corrected')
    # simplify the sequence using the Visvalingam-Whyatt algorithm
    VWpts = simplify(x, seq, maxpoints)
    xn, yn = [list(x) for x in zip(*VWpts)]
    if plot:
        plt.plot(xn, yn, linewidth=2, color='r', label='VW-simplified')
    # NOTE(review): 'simple' is computed but never used below
    simple = np.interp(x, xn, yn)
    # create a list of OGR points for the original border
    points = []
    for xi, yi in enumerate(seq):
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(int(xi), int(yi))
        points.append(point)
    points = np.array(points)
    while True:
        # create a polygon containing all pixels inside the simplified border
        # i.e., containing the area considered valid
        poly = createPoly(xn, yn, seq.size, int(max(seq)))
        # create an OGR line from the simplified border points
        line = ogr.Geometry(ogr.wkbLineString)
        for xi, yi in zip(xn, yn):
            line.AddPoint(xi, yi)
        # compute the distance of each original point to the simplified line
        dists = np.array([line.Distance(point) for point in points])
        # check which points are inside of the polygon
        contain = np.array([point.Within(poly) for point in points])
        # remove points outside the polygon and stop if
        # no further points outside the polygon exist
        dists[~contain] = 0
        points = points[(dists > 0)]
        dists = dists[(dists > 0)]
        if len(dists) == 0:
            break
        # select the point with the largest distance to the simplified
        # line and add it to the list of simplified points
        # this reduces the size of the polygon an thus the area considered valid
        candidate = points[np.argmax(dists)]
        cp = candidate.GetPoint()
        # insertion index keeping xn sorted in ascending order
        index = np.argmin(np.array(xn) < cp[0])
        xn.insert(index, cp[0])
        yn.insert(index, cp[1])
    if plot:
        plt.plot(xn, yn, linewidth=2, color='limegreen', label='corrected')

    # further straighten the line segments
    # def straight(xn, yn, VWpts):
    #     indices = [i for i in range(0, len(xn)) if (xn[i], yn[i]) in VWpts]
    #     log.info(indices)
    #     for i, j in enumerate(indices):
    #         if i < (len(indices) - 1):
    #             if indices[i + 1] > j + 1:
    #                 dx = abs(xn[j] - xn[indices[i + 1]])
    #                 dy = abs(yn[j] - yn[indices[i + 1]])
    #                 if dx > dy:
    #                     seg_y = yn[j:indices[i + 1] + 1]
    #                     for k in range(j, indices[i + 1] + 1):
    #                         yn[k] = min(seg_y)
    #     return yn

    def straight(xn, yn, VWpts):
        # indices of points that belong to the original VW simplification
        indices = [i for i in range(0, len(xn)) if (xn[i], yn[i]) in VWpts]
        xn_new = []
        yn_new = []
        # make all line segments horizontal or vertical
        for index in range(len(indices) - 1):
            i = indices[index]
            j = indices[index + 1]
            ymin = min(yn[i:j + 1])
            xn_new.extend([xn[i], xn[j]])
            yn_new.extend([ymin, ymin])
        # shift horizontal lines down if the preceding horizontal line has a lower y value
        # but only if the shift is less than the tolerance
        tolerance = 15
        # NOTE(review): yn_new[i + 3] may exceed the list bounds for the
        # largest i in this range — possible IndexError; confirm upstream
        for i in range(len(xn_new) - 2):
            if yn_new[i] == yn_new[i + 1]:
                if yn_new[i] < yn_new[i + 2] and abs(yn_new[i] - yn_new[i + 2]) < tolerance:
                    yn_new[i + 2] = yn_new[i]
                    yn_new[i + 3] = yn_new[i]
                elif (yn_new[i] > yn_new[i + 2]) \
                        and (yn_new[i + 2] == yn_new[i + 3]) \
                        and abs(yn_new[i] - yn_new[i + 2]) < tolerance:
                    yn_new[i] = yn_new[i + 2]
                    yn_new[i + 1] = yn_new[i + 2]
        return xn_new, yn_new

    if straighten:
        xn, yn = straight(xn, yn, VWpts)
        if plot:
            plt.plot(xn, yn, linewidth=2, color='m', label='straightened')
    if plot:
        plt.legend()
        plt.xlabel('row')
        plt.ylabel('column')
        plt.show()
    # interpolate the simplified points back to the original x positions
    return np.interp(x, xn, yn).astype(int)
================================================
FILE: pyroSAR/S1/polysimplify.py
================================================
"""
Visvalingam-Whyatt method of poly-line vertex reduction
Visvalingam, M and Whyatt J D (1993)
"Line Generalisation by Repeated Elimination of Points", Cartographic J., 30 (1), 46 - 51
Described here:
https://web.archive.org/web/20100428020453/http://www2.dcs.hull.ac.uk/CISRG/publications/DPs/DP10/DP10.html
=========================================
The MIT License (MIT)
Copyright (c) 2014 Elliot Hallmark
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================
code was obtained from https://github.com/Permafacture/Py-Visvalingam-Whyatt/blob/master/polysimplify.py
minor edits for Python3 compatibility by John Truckenbrodt 2019
"""
from numpy import array, argmin
import numpy as np
import logging
log = logging.getLogger(__name__)
def triangle_area(p1, p2, p3):
    """
    compute the area of the triangle spanned by three 2D vertices
    """
    # shoelace formula for a single triangle
    a = p1[0] * (p2[1] - p3[1])
    b = p2[0] * (p3[1] - p1[1])
    c = p3[0] * (p1[1] - p2[1])
    return abs(a + b + c) / 2.
def triangle_areas_from_array(arr):
    """
    take an (N,2) array of points and return an (N,)
    array of the areas of the triangles formed by each point
    and its two neighbors; the first and last entries are np.inf
    so that the endpoints are never filtered out.
    see triangle_area for the formula
    """
    result = np.empty((len(arr),), arr.dtype)
    result[0] = np.inf
    result[-1] = np.inf
    p1 = arr[:-2]
    p2 = arr[1:-1]
    p3 = arr[2:]
    # vectorized shoelace formula for all interior points at once;
    # the addition order matches the original accumulator version
    result[1:-1] = np.abs(p3[:, 0] * (p1[:, 1] - p2[:, 1])
                          + (p2[:, 0] * (p3[:, 1] - p1[:, 1])
                             + p1[:, 0] * (p2[:, 1] - p3[:, 1]))) / 2.
    return result
# the final value in thresholds is np.inf, which will never be
# the min value. So, I am safe in "deleting" an index by
# just shifting the array over on top of it
def remove(s, i):
    """
    Quick trick to remove an item from a numpy array without
    creating a new object. Rather than the array shape changing,
    the final value just gets repeated to fill the space.
    ~3.5x faster than numpy.delete
    """
    # shift all elements after index i one position to the left;
    # the last element remains duplicated as a filler
    s[i:-1] = s[i + 1:]
class VWSimplifier(object):
    # Visvalingam-Whyatt simplifier: the per-vertex effective areas are
    # computed once at construction; filtering afterwards is very fast.

    def __init__(self, pts):
        """
        Initialize with points. takes some time to build
        the thresholds but then all threshold filtering later
        is ultra fast
        """
        self.pts = np.array(pts)
        # per-vertex effective area, aligned with self.pts
        self.thresholds = self.build_thresholds()
        # descending copy used for point-count based filtering
        self.ordered_thresholds = sorted(self.thresholds, reverse=True)

    def build_thresholds(self):
        """
        compute the area value of each vertex, which one would
        use to mask an array of points for any threshold value.
        returns a numpy.array (length of pts) of the areas.
        """
        pts = self.pts
        nmax = len(pts)
        real_areas = triangle_areas_from_array(pts)
        real_indices = list(range(nmax))
        # destructable copies
        # ARG! areas=real_areas[:] doesn't make a copy!
        areas = np.copy(real_areas)
        i = real_indices[:]
        # pick first point and set up for loop
        min_vert = int(argmin(areas))
        this_area = areas[min_vert]
        # areas and i are modified for each point finished
        remove(areas, min_vert)  # faster
        # areas = np.delete(areas,min_vert) #slower
        real_idx = i.pop(min_vert)
        # cntr = 3
        while this_area < np.inf:
            '''min_vert was removed from areas and i. Now,
            adjust the adjacent areas and remove the new
            min_vert.
            Now that min_vert was filtered out, min_vert points
            to the point after the deleted point.'''
            skip = False  # modified area may be the next minvert
            try:
                right_area = triangle_area(pts[i[min_vert - 1]],
                                           pts[i[min_vert]], pts[i[min_vert + 1]])
            except IndexError:
                # trying to update area of endpoint. Don't do it
                pass
            else:
                right_idx = i[min_vert]
                if right_area <= this_area:
                    # even if the point now has a smaller area,
                    # it ultimately is not more significant than
                    # the last point, which needs to be removed
                    # first to justify removing this point.
                    # Though this point is the next most significant
                    right_area = this_area
                    # min_vert refers to the point to the right of
                    # the previous min_vert, so we can leave it
                    # unchanged if it is still the min_vert
                    skip = min_vert
                # update both collections of areas
                real_areas[right_idx] = right_area
                areas[min_vert] = right_area
            if min_vert > 1:
                # cant try/except because 0-1=-1 is a valid index
                left_area = triangle_area(pts[i[min_vert - 2]],
                                          pts[i[min_vert - 1]], pts[i[min_vert]])
                if left_area <= this_area:
                    # same justification as above
                    left_area = this_area
                    skip = min_vert - 1
                real_areas[i[min_vert - 1]] = left_area
                areas[min_vert - 1] = left_area
            # only argmin if we have too.
            min_vert = skip or argmin(areas)
            real_idx = i.pop(min_vert)
            this_area = areas[min_vert]
            # areas = np.delete(areas,min_vert) #slower
            remove(areas, min_vert)  # faster
            # NOTE(review): everything from here to the closing triple quote
            # appears garbled by text extraction — the upstream file closes
            # this debugging string, returns real_areas and defines
            # from_threshold/from_ratio here; restore from upstream before use
            '''if sum(np.where(areas==np.inf)[0]) != sum(list(reversed(range(len(areas))))[:cntr]):
            print "broke:",np.where(areas==np.inf)[0],cntr
            break
            cntr+=1
            #if real_areas[0]= threshold]
            def from_number(self, n):
            thresholds = self.ordered_thresholds
            try:
            threshold = thresholds[int(n)]
            except IndexError:
            return self.pts
            return self.pts[self.thresholds > threshold]
            def from_ratio(self, r):
            if r <= 0 or r > 1:
            raise ValueError("Ratio must be 0 threshold],precision=precision)
            return arr.replace('[[ ','(').replace(']]',')').replace(']\n [ ',',')
            '''
def wkt_from_threshold(self, threshold, precision=None):
    """return the points at or above threshold as a WKT coordinate list"""
    if precision:
        self.set_precision(precision)
    kept = self.pts_as_strs[self.thresholds >= threshold]
    coords = ','.join('%s %s' % (x, y) for x, y in kept)
    return '(%s)' % coords
def wkt_from_number(self, n, precision=None):
    """return a WKT coordinate list reduced to approximately n points"""
    # polygons need at least 3 vertices. TODO something better
    if n < 3:
        n = 3
    ordered = self.ordered_thresholds
    idx = int(n)
    # out-of-range index means fewer points exist than requested: keep all
    threshold = ordered[idx] if idx < len(ordered) else 0
    return self.wkt_from_threshold(threshold, precision=precision)
def wkt_from_ratio(self, r, precision=None):
if r <= 0 or r > 1:
raise ValueError("Ratio must be 0 413962.65495176613
gdalsimplifierpoly.area -> 413962.65495339036
"""
def __init__(self, geom, precision=None, return_GDAL=True):
    """
    accepts a gdal.OGRGeometry or geos.GEOSGeometry
    object and wraps multiple
    VWSimplifiers. set return_GDAL to False for faster
    filtering with arrays of floats returned instead of
    geometry objects.
    """
    # NOTE(review): 'p' is presumably a module-level compiled regex for WKT
    # coordinate pairs; its definition is not visible here — confirm upstream
    global p
    self.return_GDAL = return_GDAL
    if isinstance(geom, OGRGeometry):
        name = geom.geom_name
        self.Geometry = lambda w: OGRGeometry(w, srs=geom.srs)
        self.pts = np.array(geom.tuple)
    elif isinstance(geom, GEOSGeometry):
        name = geom.geom_type.upper()
        self.Geometry = lambda w: fromstr(w)
        self.pts = np.array(geom.tuple)
    elif isinstance(geom, str) or isinstance(geom, bytes):
        # assume wkt
        # for WKT
        def str2tuple(q):
            # rewrite an 'x y' coordinate pair as an '(x,y)' tuple string
            return '(%s,%s)' % (q.group(1), q.group(2))
        self.return_GDAL = False  # don't even try
        self.Geometry = lambda w: w  # this will never be used
        name, pts = geom.split(' ', 1)
        self.pts = loads(p.sub(str2tuple, pts). \
            replace('(', '[').replace(')', ']'))
    self.precision = precision
    # dispatch to type-specific build/mask functions
    if name == 'LINESTRING':
        self.maskfunc = self.linemask
        self.buildfunc = self.linebuild
        self.fromnumfunc = self.notimplemented
    elif name == "POLYGON":
        self.maskfunc = self.polymask
        self.buildfunc = self.polybuild
        self.fromnumfunc = self.notimplemented
    elif name == "MULTIPOLYGON":
        self.maskfunc = self.multimask
        self.buildfunc = self.multibuild
        self.fromnumfunc = self.notimplemented
    else:
        raise RuntimeError("""
Only types LINESTRING, POLYGON and MULTIPOLYGON
supported, but got %s""" % name)
    # sets self.simplifiers to a list of VWSimplifiers
    self.buildfunc()
# rather than concise, I'd rather be explicit and clear.
def pt2str(self, pt):
    """make length 2 numpy.array.__str__() fit for wkt"""
    # join the two coordinate strings with a single space, e.g. 'x y'
    return ' '.join(pt)
def linebuild(self):
    # a LINESTRING has a single coordinate sequence -> one simplifier
    self.simplifiers = [WKTSimplifier(self.pts)]
def line2wkt(self, pts):
    # wrap the '(x y,...)' coordinate list into a LINESTRING WKT literal
    return u'LINESTRING %s' % pts
def linemask(self, threshold):
    """filter the line's points by threshold; return geometry or raw points"""
    pts = self.get_pts(self.simplifiers[0], threshold)
    if not self.return_GDAL:
        return pts
    return self.Geometry(self.line2wkt(pts))
def polybuild(self):
    """create one WKTSimplifier per polygon ring"""
    self.simplifiers = [WKTSimplifier(ring) for ring in self.pts]
def poly2wkt(self, list_of_pts):
    # each entry is one ring's '(x y,...)' coordinate list string
    return u'POLYGON (%s)' % ','.join(list_of_pts)
def polymask(self, threshold):
    """filter each polygon ring by threshold; return geometry or array"""
    rings = [self.get_pts(sim, threshold) for sim in self.simplifiers]
    if not self.return_GDAL:
        return array(rings)
    return self.Geometry(self.poly2wkt(rings))
def multibuild(self):
    """create one WKTSimplifier per ring of each member polygon"""
    self.simplifiers = [[WKTSimplifier(ring) for ring in polygon]
                        for polygon in self.pts]
def multi2wkt(self, list_of_list_of_pts):
    """assemble a MULTIPOLYGON WKT literal from nested ring strings"""
    polygons = ['(%s)' % ','.join(rings) for rings in list_of_list_of_pts]
    return u'MULTIPOLYGON (%s)' % ','.join(polygons)
def multimask(self, threshold):
    """filter every ring of every member polygon; return geometry or nested lists"""
    filtered = [[self.get_pts(sim, threshold) for sim in ring_sims]
                for ring_sims in self.simplifiers]
    if self.return_GDAL:
        return self.Geometry(self.multi2wkt(filtered))
    return filtered
def notimplemented(self, n):
    # placeholder assigned to fromnumfunc for unsupported geometry types
    log.info('This function is not yet implemented')
def from_threshold(self, threshold):
    """simplify all geometry parts by a fixed area threshold"""
    precision = self.precision
    if self.return_GDAL:
        def get_pts(sim, t):
            return sim.wkt_from_threshold(t, precision)
    else:
        def get_pts(sim, t):
            return sim.from_threshold(t)
    self.get_pts = get_pts
    return self.maskfunc(threshold)
def from_number(self, n):
    """simplify all geometry parts to approximately n points each"""
    precision = self.precision
    if self.return_GDAL:
        def get_pts(sim, t):
            return sim.wkt_from_number(t, precision)
    else:
        def get_pts(sim, t):
            return sim.from_number(t)
    self.get_pts = get_pts
    return self.maskfunc(n)
def from_ratio(self, r):
    """simplify all geometry parts by a point-count ratio r"""
    precision = self.precision
    if self.return_GDAL:
        def get_pts(sim, t):
            return sim.wkt_from_ratio(t, precision)
    else:
        def get_pts(sim, t):
            return sim.from_ratio(t)
    self.get_pts = get_pts
    return self.maskfunc(r)
def fancy_parametric(k):
    """
    parametric test curve; returns coordinate functions (xt, yt) of the
    curve parameter t. good k's: .33,.5,.65,.7,1.3,1.4,1.9,3,4,5
    """
    def xt(t):
        return (k - 1) * np.cos(t) + np.cos(t * (k - 1))

    def yt(t):
        return (k - 1) * np.sin(t) - np.sin(t * (k - 1))

    return xt, yt
if __name__ == "__main__":
    # demo: simplify a dense parametric curve and save a plot of the result
    from time import time
    n = 5000
    thetas = np.linspace(0, 16 * np.pi, n)
    xt, yt = fancy_parametric(1.4)
    pts = np.array([[xt(t), yt(t)] for t in thetas])
    start = time()
    simplifier = VWSimplifier(pts)
    pts = simplifier.from_number(1000)
    end = time()
    log.info("%s vertices removed in %02f seconds" % (n - len(pts), end - start))
    # use a non-interactive backend so this also works without a display
    import matplotlib
    matplotlib.use('AGG')
    import matplotlib.pyplot as plot
    plot.plot(pts[:, 0], pts[:, 1], color='r')
    plot.savefig('visvalingam.png')
    log.info("saved visvalingam.png")
    # plot.show()
================================================
FILE: pyroSAR/__init__.py
================================================
from .drivers import *
from .archive import Archive, drop_archive
from . import ancillary, drivers

from importlib.metadata import version, PackageNotFoundError

try:
    # read the version string from the installed package metadata
    __version__ = version(__name__)
except PackageNotFoundError:
    # package is not installed
    pass
================================================
FILE: pyroSAR/ancillary.py
================================================
###############################################################################
# ancillary routines for software pyroSAR
# Copyright (c) 2014-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
"""
This module gathers central functions and classes for general pyroSAR applications.
"""
import os
import re
import time
import uuid
from pathlib import Path
from math import sin, radians
import inspect
from datetime import datetime
from . import patterns
from spatialist.ancillary import finder
from dataclasses import dataclass
from typing import Optional, Literal, Callable, Any
try:
from typing import Self
except ImportError:
# Python < 3.11
from typing_extensions import Self
from types import TracebackType
import logging
log = logging.getLogger(__name__)
def groupby(
        images: list[str],
        attribute: str
) -> list[list[str]]:
    """
    group a list of images by a metadata attribute

    Parameters
    ----------
    images:
        the names of the images to be sorted
    attribute:
        the name of the attribute used for sorting;
        see :func:`parse_datasetname` for options

    Returns
    -------
    a list of sub-lists containing the grouped images
    """
    def sortkey(name):
        # the attribute value parsed directly from the file name
        return re.search(patterns.pyrosar, name).group(attribute)
    
    remaining = sorted(images, key=sortkey)
    # seed the first group with the first image's metadata
    grouped = [[parse_datasetname(remaining.pop(0))]]
    for filename in remaining:
        meta = parse_datasetname(filename)
        # extend the current group while the attribute value is unchanged
        if grouped[-1][0][attribute] == meta[attribute]:
            grouped[-1].append(meta)
        else:
            grouped.append([meta])
    return [[m['filename'] for m in group] for group in grouped]
def groupbyTime(
images: list[str],
function: Callable[[str], Any],
time: int | float
) -> list[list[str]]:
"""
function to group images by their acquisition time difference
Parameters
----------
images:
a list of image names
function:
a function to derive the time from the image names; see e.g. :func:`seconds`
time:
a time difference in seconds by which to group the images
Returns
-------
a list of sub-lists containing the grouped images
"""
# sort images by time stamp
srcfiles = sorted(images, key=function)
groups = [[srcfiles[0]]]
group = groups[0]
for i in range(1, len(srcfiles)):
item = srcfiles[i]
timediff = abs(function(item) - function(group[-1]))
if timediff <= time:
group.append(item)
else:
groups.append([item])
group = groups[-1]
return [x[0] if len(x) == 1 else x for x in groups]
def multilook_factors(
source_rg: int | float,
source_az: int | float,
target: int | float,
geometry: Literal["SLANT_RANGE", "GROUND_RANGE"],
incidence: int | float
) -> tuple[int, int]:
"""
Compute multi-looking factors. A square pixel is approximated with
defined target ground range pixel spacing. The function computes a
cost for multilook factor combinations based on the difference between
the resulting spacing and the target spacing for range and azimuth
respectively and the difference between range and azimuth spacing.
Based on this cost, the optimal multilook factors are chosen.
Each of the three criteria is weighted equally.
Parameters
----------
source_rg:
the range pixel spacing
source_az:
the azimuth pixel spacing
target:
the target pixel spacing of an approximately square pixel
geometry:
the imaging geometry; either 'SLANT_RANGE' or 'GROUND_RANGE'
incidence:
the angle of incidence in degrees
Returns
-------
the multi-looking factors as (range looks, azimuth looks)
Examples
--------
>>> from pyroSAR.ancillary import multilook_factors
>>> rlks, azlks = multilook_factors(source_rg=2, source_az=13, target=10,
>>> geometry='SLANT_RANGE', incidence=39)
>>> print(rlks, azlks)
4 1
"""
@dataclass
class MultilookResult:
rglks: int
azlks: int
cost: float
sp_az = source_az
if geometry == 'SLANT_RANGE':
sp_rg = source_rg / sin(radians(incidence))
elif geometry == 'GROUND_RANGE':
sp_rg = source_rg
else:
raise ValueError("parameter 'geometry' must be either "
"'SLANT_RANGE' or 'GROUND_RANGE'")
sp_target = max(sp_az, sp_rg, target)
# determine initial ML factors
rglks_init = int(round(sp_target / sp_rg))
azlks_init = int(round(sp_target / sp_az))
best: Optional[MultilookResult] = None
# weights for the distance criteria
w_rg = 1.
w_az = 1.
w_sq = 1.
# iterate over some range of ML factors to find the best
# combination.
for rglks in range(1, rglks_init + 6):
sp_rg_out = sp_rg * rglks
for azlks in range(1, azlks_init + 6):
sp_az_out = sp_az * azlks
# compute distances and cost
d_rg = abs(sp_rg_out - sp_target)
d_az = abs(sp_az_out - sp_target)
d_sq = abs(sp_rg_out - sp_az_out)
cost = w_rg * d_rg + w_az * d_az + w_sq * d_sq
candidate = MultilookResult(
rglks=rglks,
azlks=azlks,
cost=cost,
)
if best is None:
best = candidate
else:
# primary: minimize cost
if candidate.cost < best.cost:
best = candidate
# secondary: minimize rglks+azlks
elif candidate.cost == best.cost:
if (candidate.rglks + candidate.azlks) < (best.rglks + best.azlks):
best = candidate
rglks = best.rglks
azlks = best.azlks
log.debug(f'ground range spacing: ({sp_rg * rglks}, {sp_az * azlks})')
return rglks, azlks
def seconds(filename: str) -> float:
    """
    extract a time stamp from a file name and convert it to seconds
    since 1900-01-01. The format must follow the fixed pattern
    YYYYmmddTHHMMSS. Images processed with pyroSAR functionalities via
    module snap or gamma will contain this information.

    Parameters
    ----------
    filename:
        the name of a file from which to extract the time from

    Returns
    -------
    the difference between the time stamp in filename and Jan 01 1900 in seconds
    """
    # first 15-character run of digits and 'T' is taken as the time stamp
    timestamp = re.findall('[0-9T]{15}', filename)[0]
    parsed = datetime.strptime(timestamp, '%Y%m%dT%H%M%S')
    delta = parsed - datetime(1900, 1, 1)
    return delta.total_seconds()
def parse_datasetname(
        name: str,
        parse_date: bool = False
) -> dict[str, str | None | list[str] | datetime] | None:
    """
    Parse the name of a pyroSAR processing product

    Parameters
    ----------
    name:
        the name of the file to be parsed
    parse_date:
        parse the start date to a :class:`~datetime.datetime`
        object or just return the string?

    Returns
    -------
    the metadata attributes parsed from the file name or
    None if the file name does not match the pattern.

    Examples
    --------
    >>> meta = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif')
    >>> print(sorted(meta.keys()))
    ['acquisition_mode', 'extensions', 'filename', 'orbit',
    'outname_base', 'polarization', 'proc_steps', 'sensor', 'start']
    """
    # use the absolute path for existing files so 'filename' is unambiguous
    filename = os.path.abspath(name) if os.path.isfile(name) else name
    match = re.match(re.compile(patterns.pyrosar), filename)
    if not match:
        return None
    meta = match.groupdict()
    # normalize an empty extensions group to None
    if meta['extensions'] == '':
        meta['extensions'] = None
    # split the processing step suffix chain into a list
    if meta['proc_steps'] is not None:
        meta['proc_steps'] = meta['proc_steps'].split('_')
    if parse_date:
        meta['start'] = datetime.strptime(meta['start'], '%Y%m%dT%H%M%S')
    meta['filename'] = filename
    meta['outname_base'] = meta['outname_base'].strip('_')
    return meta
def find_datasets(
        directory: str,
        recursive: bool = False,
        **kwargs
) -> list[str]:
    """
    find pyroSAR datasets in a directory based on their metadata

    Parameters
    ----------
    directory:
        the name of the directory to be searched
    recursive:
        search the directory recursively into subdirectories?
    kwargs:
        Metadata attributes for filtering the scene list supplied as `key=value`. e.g. `sensor='S1A'`.
        Multiple allowed options can be provided in tuples, e.g. `sensor=('S1A', 'S1B')`.
        Any types other than tuples require an exact match, e.g. `proc_steps=['grd', 'mli', 'geo', 'norm', 'db']`
        will be matched only if these processing steps are contained in the product name in this exact order.
        The special attributes `start` and `stop` can be used for time filtering where `start<=value<=stop`.
        See function :func:`parse_datasetname` for further options.

    Returns
    -------
    the file names found in the directory and filtered by metadata attributes

    Examples
    --------
    >>> selection = find_datasets('path/to/files', sensor=('S1A', 'S1B'), polarization='VV')
    """
    candidates = finder(directory, [patterns.pyrosar], regex=True, recursive=recursive)
    selection = []
    for candidate in candidates:
        meta = parse_datasetname(candidate)
        
        def check(key, val):
            # evaluate a single key=value filter against the parsed metadata
            if key == 'start':
                return val <= meta['start']
            if key == 'stop':
                # only the start time stamp is contained in the filename
                return val >= meta['start']
            if isinstance(val, tuple):
                return meta[key] in val
            return meta[key] == val
        
        # evaluate every filter (no short-circuiting)
        if all([check(key, val) for key, val in kwargs.items()]):
            selection.append(candidate)
    return selection
def getargs(func: Callable[..., Any]) -> list[str]:
    """
    get the argument names of a function in alphabetical order

    Parameters
    ----------
    func:
        the function to be checked

    Returns
    -------
    the argument names
    """
    spec = inspect.getfullargspec(func)
    return sorted(spec.args)
def hasarg(func: Callable[..., Any], arg: str) -> bool:
    """
    simple check whether a function takes a parameter as input

    Parameters
    ----------
    func:
        the function to be checked
    arg:
        the argument name to be found

    Returns
    -------
    does the function take this as argument?
    """
    # membership in the raw argspec is equivalent to membership in the
    # sorted list returned by getargs
    return arg in inspect.getfullargspec(func).args
def windows_fileprefix(
func: Callable[[str], object],
path: str,
exc_info: tuple[type[BaseException], BaseException, TracebackType | None],
) -> None:
"""
Helper function for :func:`shutil.rmtree` to exceed Windows' file name length limit of 256 characters.
See `here `_ for details.
Parameters
----------
func:
the function to be executed, i.e. :func:`shutil.rmtree`
path:
the path to be deleted
exc_info:
execution info as returned by :func:`sys.exc_info`
Returns
-------
Examples
--------
>>> import shutil
>>> from pyroSAR.ancillary import windows_fileprefix
>>> shutil.rmtree('/path', onerror=windows_fileprefix)
"""
func(u'\\\\?\\' + path)
class Lock(object):
"""
File and folder locking mechanism.
This mechanism creates lock files indicating whether a file/folder
1. is being modified (`target`.lock),
2. is being used/read (`target`.used_) or
3. was damaged during modification (`target`.error).
Although these files will not prevent locking by other mechanisms (UNIX
locks are generally only advisory), this mechanism is respected across
any running instances. I.e., if such a lock file exists, no process
trying to acquire a lock using this class will succeed if a lock file
intending to prevent it exists. This was implemented because other existing
solutions like `filelock `_ or
`fcntl `_ do not implement
effective solutions for parallel jobs in HPC systems.
Hard locks prevent any usage of the data. Damage/error locks work like hard
locks except that `timeout` is ignored and a `RuntimeError` is raised immediately.
Error locks are created if an error occurs whilst a hard lock is acquired and
`target` exists (by renaming the hard lock file).
Infinite usage locks may exist, each with a different random UUID. No hard
lock may be acquired whilst usage locks exist. On error usage locks are simply
deleted.
The class supports nested locks. One function might lock a file, and another
function called in the same process will reuse this lock if it tries to lock
the file.
It may happen that lock files remain when a process is killed by HPC schedulers
like Slurm because in this case the process is not ended by Python. Optimally,
hard locks should be renamed to error lock files and usage lock files should be
deleted. This has to be done separately.
Examples
--------
>>> from pyroSAR.ancillary import Lock
>>> target = 'test.txt'
>>> with Lock(target=target):
>>> with open(target, 'w') as f:
>>> f.write('Hello World!')
>>> with Lock(target=target): # initialize lock
>>> with Lock(target=target): # reuse lock
>>> with open(target, 'w') as f:
>>> f.write('Hello World!')
Parameters
----------
target:
the file/folder to lock
soft:
lock the file/folder only for reading (and not for modification)?
timeout:
the time in seconds to retry acquiring a lock
"""
_instances = {}
_nesting_levels = {}
def __new__(
cls,
target: str,
soft: bool = False,
timeout: int = 7200
) -> Self:
target_abs = os.path.abspath(os.path.expanduser(target))
if target_abs not in cls._instances:
log.debug(f'creating lock instance for target {target_abs}')
instance = super().__new__(cls)
cls._instances[target_abs] = instance
cls._nesting_levels[target_abs] = 0
else:
if soft != cls._instances[target_abs].soft:
msg = 'cannot place nested {}-lock on existing {}-lock for target {}'
vals = ['read', 'write'] if soft else ['write', 'read']
vals.append(target_abs)
raise RuntimeError(msg.format(*vals))
log.debug(f'reusing lock instance for target {target_abs}')
return cls._instances[target_abs]
def __init__(
self,
target: str,
soft: bool = False,
timeout: int = 7200
) -> None:
if not hasattr(self, '_initialized'):
self.target = os.path.abspath(os.path.expanduser(target))
used_id = str(uuid.uuid4())
self.lock = self.target + '.lock'
self.error = self.target + '.error'
self.used = self.target + f'.used_{used_id}'
self.soft = soft
if os.path.isfile(self.error):
msg = 'cannot acquire lock on damaged target: {}'
raise RuntimeError(msg.format(self.target))
end = time.time() + timeout
log.debug(f'trying to {"read" if self.soft else "write"}-lock {target}')
while True:
if time.time() > end:
msg = 'could not acquire lock due to timeout: {}'
raise RuntimeError(msg.format(self.target))
try:
if self.soft and not os.path.isfile(self.lock):
Path(self.used).touch(exist_ok=False)
break
if not self.soft and not self.is_used():
Path(self.lock).touch(exist_ok=False)
break
except FileExistsError:
pass
time.sleep(1)
log.debug(f'acquired {"read" if self.soft else "write"}-lock on {target}')
self._initialized = True
Lock._nesting_levels[self.target] += 1
def __enter__(self) -> Self:
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
self.remove(exc_type)
def is_used(self) -> bool:
"""
Does any usage lock exist?
"""
base = os.path.basename(self.target)
folder = os.path.dirname(self.target)
files = list(Path(folder).glob(base + '.used*'))
return len(files) > 0
    def remove(
            self,
            exc_type: type[BaseException] | None = None
    ) -> None:
        """
        Remove the acquired soft/hard lock or rename it to an error lock.

        Parameters
        ----------
        exc_type:
            the type of an exception raised while the lock was held
            (forwarded by :meth:`__exit__`), or None if no error occurred.
        """
        Lock._nesting_levels[self.target] -= 1
        if Lock._nesting_levels[self.target] == 0:
            # outermost nesting level reached: actually release the lock
            if not self.soft and exc_type is not None and os.path.exists(self.target):
                # a write operation failed while the target still exists:
                # convert the write lock into a persistent error lock so that
                # later acquisition attempts on the (potentially damaged)
                # target are refused (see __init__)
                os.rename(self.lock, self.error)
                log.debug(f'placed error-lock on {self.target}')
            else:
                if self.soft:
                    os.remove(self.used)
                else:
                    os.remove(self.lock)
                msg_sub = "read" if self.soft else "write"
                log.debug(f'removed {msg_sub}-lock on {self.target}')
            # discard the cached singleton so a fresh Lock can be created
            # for this target later on
            del Lock._instances[self.target]
            del Lock._nesting_levels[self.target]
        else:
            log.debug(f'decrementing lock level on {self.target}')
class LockCollection(object):
    """
    Like :class:`Lock` but for multiple files/folders.
    All locks are acquired on creation and released together on exit.

    Parameters
    ----------
    targets:
        the files/folders to lock
    soft:
        lock the files/folders only for reading (and not for modification)?
    timeout:
        the time in seconds to retry acquiring a lock
    """
    
    def __init__(
            self,
            targets: list[str],
            soft: bool = False,
            timeout: int = 7200
    ):
        self.locks = []
        for target in targets:
            self.locks.append(Lock(target, soft=soft, timeout=timeout))
    
    def __enter__(self) -> Self:
        return self
    
    def __exit__(
            self,
            exc_type: type[BaseException] | None,
            exc_value: BaseException | None,
            traceback: TracebackType | None,
    ) -> None:
        # release in reverse acquisition order
        for lock in reversed(self.locks):
            lock.__exit__(exc_type, exc_value, traceback)
================================================
FILE: pyroSAR/archive.py
================================================
###############################################################################
# Scene database tools for pyroSAR
# Copyright (c) 2016-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
from __future__ import annotations
import os
import re
import gc
import shutil
import sys
import socket
import time
import platform
import logging
import csv
import inspect
from datetime import datetime
import progressbar as pb
from types import TracebackType
from typing import Any, Protocol, runtime_checkable, Literal
from osgeo import gdal
from spatialist import sqlite3
from spatialist.vector import Vector
from spatialist.ancillary import finder
from pyroSAR.drivers import identify, identify_many, ID
from sqlalchemy import create_engine, Table, MetaData, Column, Integer, String, exc
from sqlalchemy import inspect as sql_inspect
from sqlalchemy.event import listen
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import select, func
from sqlalchemy.engine.url import URL
from sqlalchemy.ext.automap import automap_base
from sqlalchemy_utils import database_exists, create_database, drop_database
from geoalchemy2 import Geometry
log = logging.getLogger(__name__)
gdal.UseExceptions()
@runtime_checkable
class SceneArchive(Protocol):
    """
    Common interface for scene archive backends.

    Implementations may represent local databases, STAC catalogs, remote APIs,
    or other scene repositories, but should expose a consistent `select`
    method and support context-manager usage.
    """
    
    def __enter__(self) -> SceneArchive:
        """
        Enter the archive context.
        """
        ...
    
    def __exit__(
            self,
            exc_type: type[BaseException] | None,
            exc_val: BaseException | None,
            exc_tb: TracebackType | None,
    ) -> None:
        """
        Exit the archive context and release resources if necessary.
        """
        ...
    
    def close(self) -> None:
        """
        Release open resources.

        Implementations that do not hold resources may implement this as a no-op.
        """
        ...
    
    # NOTE(review): declared as a staticmethod although implementations such
    # as Archive define 'select' as an instance method; runtime_checkable
    # protocols only check attribute presence, so both still match — confirm
    # whether the staticmethod declaration is intended.
    @staticmethod
    def select(
            sensor: str | list[str] | None = None,
            product: str | list[str] | None = None,
            acquisition_mode: str | list[str] | None = None,
            mindate: str | datetime | None = None,
            maxdate: str | datetime | None = None,
            vectorobject: Vector | None = None,
            date_strict: bool = True,
            return_value: str | list[str] = "scene"
    ) -> list[Any]:
        """
        Select scenes matching the query parameters.

        Implementations may accept additional backend-specific keyword
        arguments beyond the ones listed here.

        Parameters
        ----------
        sensor:
            One sensor or a list of sensors.
        product:
            One product type or a list of product types.
        acquisition_mode:
            One acquisition mode or a list of acquisition modes.
        mindate:
            Minimum acquisition date/time.
        maxdate:
            Maximum acquisition date/time.
        vectorobject:
            Spatial search geometry.
        date_strict:
            Whether date filtering should be strict.
        return_value:
            One return field or a list of return fields.

        Returns
        -------
        The query result. Implementations may return a list of scalar values or
        tuples depending on `return_value`.
        """
        ...
class Archive(SceneArchive):
"""
Utility for storing SAR image metadata in a database
Parameters
----------
dbfile:
the filename for the SpatiaLite database. This might either point to an
existing database or will be created otherwise. If postgres is set to True,
this will be the name for the PostgreSQL database.
custom_fields:
a dictionary containing additional non-standard database column names and data types;
the names must be attributes of the SAR scenes to be inserted (i.e. id.attr) or keys in their meta attribute
(i.e. id.meta['attr'])
postgres:
enable postgres driver for the database. Default: False
user:
required for postgres driver: username to access the database. Default: 'postgres'
password:
required for postgres driver: password to access the database. Default: '1234'
host:
required for postgres driver: host where the database is hosted. Default: 'localhost'
port:
required for postgres driver: port number to the database. Default: 5432
cleanup:
check whether all registered scenes exist and remove missing entries?
legacy:
open an outdated database in legacy mode to import into a new database.
Opening an outdated database without legacy mode will throw a RuntimeError.
Examples
----------
Ingest all Sentinel-1 scenes in a directory and its subdirectories into the database:
>>> from pyroSAR import Archive, identify
>>> from spatialist.ancillary import finder
>>> dbfile = '/.../scenelist.db'
>>> archive_s1 = '/.../sentinel1/GRD'
>>> scenes_s1 = finder(archive_s1, [r'^S1.*.zip'], regex=True, recursive=True)
>>> with Archive(dbfile) as archive:
>>> archive.insert(scenes_s1)
select all Sentinel-1 A/B scenes stored in the database, which
* overlap with a test site
* were acquired in Ground-Range-Detected (GRD) Interferometric Wide Swath (IW) mode before 2018
* contain a VV polarization image
* have not been processed to directory `outdir` before
>>> from pyroSAR import Archive
>>> from spatialist import Vector
>>> archive = Archive('/.../scenelist.db')
>>> site = Vector('/path/to/site.shp')
>>> outdir = '/path/to/processed/results'
>>> maxdate = '20171231T235959'
>>> selection_proc = archive.select(vectorobject=site, processdir=outdir,
>>> maxdate=maxdate, sensor=['S1A', 'S1B'],
>>> product='GRD', acquisition_mode='IW', vv=1)
>>> archive.close()
Alternatively, the `with` statement can be used.
In this case to just check whether one particular scene is already registered in the database:
>>> from pyroSAR import identify, Archive
>>> scene = identify('S1A_IW_SLC__1SDV_20150330T170734_20150330T170801_005264_006A6C_DA69.zip')
>>> with Archive('/.../scenelist.db') as archive:
>>> print(archive.is_registered(scene.scene))
    When setting ``postgres=True``, a PostgreSQL database will be created at a given host.
Additional arguments are required.
>>> from pyroSAR import Archive, identify
>>> from spatialist.ancillary import finder
>>> dbfile = 'scenelist_db'
>>> archive_s1 = '/.../sentinel1/GRD'
>>> scenes_s1 = finder(archive_s1, [r'^S1.*.zip'], regex=True, recursive=True)
    >>> with Archive(dbfile, postgres=True, user='user', password='password', host='host', port=5432) as archive:
>>> archive.insert(scenes_s1)
Importing an old database:
>>> from pyroSAR import Archive
>>> db_new = 'scenes.db'
>>> db_old = 'scenes_old.db'
>>> with Archive(db_new) as db:
>>> with Archive(db_old, legacy=True) as db_old:
>>> db.import_outdated(db_old)
"""
def __init__(
self,
dbfile: str,
custom_fields: dict[str, Any] | None = None,
postgres: bool = False,
user: str = 'postgres',
password: str = '1234',
host: str = 'localhost',
port: int = 5432,
cleanup: bool = True,
legacy: bool = False
):
if dbfile.endswith('.csv'):
raise RuntimeError("Please create a new Archive database and import the"
"CSV file using db.import_outdated('.csv').")
# check for driver, if postgres then check if server is reachable
if not postgres:
self.driver = 'sqlite'
dirname = os.path.dirname(os.path.abspath(dbfile))
w_ok = os.access(dirname, os.W_OK)
if not w_ok:
raise RuntimeError('cannot write to directory {}'.format(dirname))
# catch if .db extension is missing
root, ext = os.path.splitext(dbfile)
if len(ext) == 0:
dbfile = root + '.db'
else:
self.driver = 'postgresql'
if not self.__check_host(host, port):
sys.exit('Server not found!')
connect_args = {}
# create dict, with which a URL to the db is created
if self.driver == 'sqlite':
self.url_dict = {'drivername': self.driver,
'database': dbfile,
'query': {'charset': 'utf8'}}
if self.driver == 'postgresql':
self.url_dict = {'drivername': self.driver,
'username': user,
'password': password,
'host': host,
'port': port,
'database': dbfile}
connect_args = {
'keepalives': 1,
'keepalives_idle': 30,
'keepalives_interval': 10,
'keepalives_count': 5}
# create engine, containing URL and driver
log.debug('starting DB engine for {}'.format(URL.create(**self.url_dict)))
self.url = URL.create(**self.url_dict)
# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS
self.engine = create_engine(url=self.url, echo=False,
connect_args=connect_args)
# call to __load_spatialite() for sqlite, to load mod_spatialite via event handler listen()
if self.driver == 'sqlite':
log.debug('loading spatialite extension')
listen(target=self.engine, identifier='connect', fn=self.__load_spatialite)
# check if loading was successful
try:
with self.engine.begin() as conn:
version = conn.execute('SELECT spatialite_version();')
except exc.OperationalError:
raise RuntimeError('could not load spatialite extension')
# if database is new, (create postgres-db and) enable spatial extension
if not database_exists(self.engine.url):
if self.driver == 'postgresql':
log.debug('creating new PostgreSQL database')
create_database(self.engine.url)
log.debug('enabling spatial extension for new database')
with self.engine.begin() as conn:
if self.driver == 'sqlite':
conn.execute(select([func.InitSpatialMetaData(1)]))
else:
conn.exec_driver_sql('CREATE EXTENSION IF NOT EXISTS postgis;')
# create Session (ORM) and get metadata
self.Session = sessionmaker(bind=self.engine)
self.meta = MetaData(self.engine)
self.custom_fields = custom_fields
# load or create tables
self.__init_data_table()
self.__init_duplicates_table()
msg = ("the 'data' table is missing {}. Please create a new database "
"and import the old one opened in legacy mode using "
"Archive.import_outdated.")
pk = sql_inspect(self.data_schema).primary_key
if 'product' not in pk.columns.keys() and not legacy:
raise RuntimeError(msg.format("a primary key 'product'"))
if 'geometry' not in self.get_colnames() and not legacy:
raise RuntimeError(msg.format("the 'geometry' column"))
self.Base = automap_base(metadata=self.meta)
self.Base.prepare(self.engine, reflect=True)
self.Data = self.Base.classes.data
self.Duplicates = self.Base.classes.duplicates
self.dbfile = dbfile
if cleanup:
log.info('checking for missing scenes')
self.cleanup()
sys.stdout.flush()
def add_tables(
self,
tables: Table | list[Table],
) -> None:
"""
Add tables to the database per :class:`sqlalchemy.schema.Table`
Tables provided here will be added to the database.
.. note::
Columns using Geometry must have setting management=True for SQLite,
for example: ``geometry = Column(Geometry('POLYGON', management=True, srid=4326))``
Parameters
----------
tables:
The table(s) to be added to the database.
"""
created = []
if isinstance(tables, list):
for table in tables:
table.metadata = self.meta
if not sql_inspect(self.engine).has_table(str(table)):
table.create(self.engine)
created.append(str(table))
else:
table = tables
table.metadata = self.meta
if not sql_inspect(self.engine).has_table(str(table)):
table.create(self.engine)
created.append(str(table))
log.info('created table(s) {}.'.format(', '.join(created)))
self.Base = automap_base(metadata=self.meta)
self.Base.prepare(self.engine, reflect=True)
    def __init_data_table(self) -> None:
        """
        Load the 'data' table schema, creating the table if it does not exist.
        """
        if sql_inspect(self.engine).has_table('data'):
            # table exists: reflect its schema from the database
            self.data_schema = Table('data', self.meta, autoload_with=self.engine)
            return
        log.debug("creating DB table 'data'")
        # composite primary key: product + outname_base
        self.data_schema = Table('data', self.meta,
                                 Column('sensor', String),
                                 Column('orbit', String),
                                 Column('orbitNumber_abs', Integer),
                                 Column('orbitNumber_rel', Integer),
                                 Column('cycleNumber', Integer),
                                 Column('frameNumber', Integer),
                                 Column('acquisition_mode', String),
                                 Column('start', String),
                                 Column('stop', String),
                                 Column('product', String, primary_key=True),
                                 Column('samples', Integer),
                                 Column('lines', Integer),
                                 Column('outname_base', String, primary_key=True),
                                 Column('scene', String),
                                 Column('hh', Integer),
                                 Column('vv', Integer),
                                 Column('hv', Integer),
                                 Column('vh', Integer),
                                 Column('geometry', Geometry(geometry_type='POLYGON',
                                                             management=True, srid=4326)))
        # add custom fields
        if self.custom_fields is not None:
            for key, val in self.custom_fields.items():
                if val in ['Integer', 'integer', 'int']:
                    self.data_schema.append_column(Column(key, Integer))
                elif val in ['String', 'string', 'str']:
                    self.data_schema.append_column(Column(key, String))
                else:
                    # NOTE(review): unsupported types are only logged and the
                    # column is silently skipped — consider raising instead
                    log.info('Value in dict custom_fields must be "integer" or "string"!')
        self.data_schema.create(self.engine)
    def __init_duplicates_table(self) -> None:
        """
        Load the 'duplicates' table schema, creating the table if it does not exist.
        """
        # create tables if not existing
        if sql_inspect(self.engine).has_table('duplicates'):
            self.duplicates_schema = Table('duplicates', self.meta, autoload_with=self.engine)
            return
        log.debug("creating DB table 'duplicates'")
        # composite primary key outname_base + scene, so the same basename can
        # be registered for several file locations
        self.duplicates_schema = Table('duplicates', self.meta,
                                       Column('outname_base', String, primary_key=True),
                                       Column('scene', String, primary_key=True))
        self.duplicates_schema.create(self.engine)
@staticmethod
def __load_spatialite(dbapi_conn: sqlite3.Connection, connection_record: Any) -> None:
"""
loads the spatialite extension for SQLite, not to be used outside the init()
Parameters
----------
dbapi_conn:
db engine
connection_record:
not sure what it does, but it is needed by :func:`sqlalchemy.event.listen`
"""
dbapi_conn.enable_load_extension(True)
# check which platform and use according mod_spatialite
if platform.system() == 'Linux':
for option in ['mod_spatialite', 'mod_spatialite.so']:
try:
dbapi_conn.load_extension(option)
except sqlite3.OperationalError:
continue
elif platform.system() == 'Darwin':
for option in ['mod_spatialite.so', 'mod_spatialite.7.dylib',
'mod_spatialite.dylib']:
try:
dbapi_conn.load_extension(option)
except sqlite3.OperationalError:
continue
else:
dbapi_conn.load_extension('mod_spatialite')
def __prepare_insertion(self, scene: str | ID) -> Any:
"""
read scene metadata and parse a string for inserting it into the database
Parameters
----------
scene:
a SAR scene
Returns
-------
object of class Data
"""
id = scene if isinstance(scene, ID) else identify(scene)
pols = [x.lower() for x in id.polarizations]
# insertion as an object of Class Data (reflected in the init())
insertion = self.Data()
colnames = self.get_colnames()
for attribute in colnames:
if attribute == 'geometry':
geom = id.geometry()
geom.reproject(4326)
geom = geom.convert2wkt(set3D=False)[0]
geom = 'SRID=4326;' + str(geom)
# set attributes of the Data object according to input
setattr(insertion, 'geometry', geom)
elif attribute in ['hh', 'vv', 'hv', 'vh']:
setattr(insertion, attribute, int(attribute in pols))
else:
if hasattr(id, attribute):
attr = getattr(id, attribute)
elif attribute in id.meta.keys():
attr = id.meta[attribute]
else:
raise AttributeError('could not find attribute {}'.format(attribute))
value = attr() if inspect.ismethod(attr) else attr
setattr(insertion, str(attribute), value)
return insertion # return the Data object
    def __select_missing(self, table: str) -> list[str]:
        """
        Collect registered scene files that no longer exist on disk.

        Parameters
        ----------
        table:
            the name of the table; either 'data' or 'duplicates'

        Returns
        -------
        the names of all scenes, which are no longer stored in their registered location

        Raises
        ------
        ValueError
            if `table` is neither 'data' nor 'duplicates'
        """
        with self.Session() as session:
            if table == 'data':
                # using ORM query to get all scenes locations
                scenes = session.query(self.Data.scene)
            elif table == 'duplicates':
                scenes = session.query(self.Duplicates.scene)
            else:
                raise ValueError("parameter 'table' must either be 'data' or 'duplicates'")
            files = [self.to_str(x[0]) for x in scenes]
            return [x for x in files if not os.path.isfile(x)]
    def insert(
            self,
            scene_in: str | ID | list[str | ID],
            pbar: bool = False,
            test: bool = False
    ) -> None:
        """
        Insert one or many scenes into the database.

        New scenes are written to the 'data' table; scenes whose basename is
        already registered but whose file location is new are written to the
        'duplicates' table; scenes present in both tables are skipped.

        Parameters
        ----------
        scene_in:
            a SAR scene or a list of scenes to be inserted
        pbar:
            show a progress bar?
        test:
            should the insertion only be tested or directly be committed to the database?
        """
        if isinstance(scene_in, (ID, str)):
            scene_in = [scene_in]
        if not isinstance(scene_in, list):
            raise RuntimeError('scene_in must either be a string pointing to a file, a pyroSAR.ID object '
                               'or a list containing several of either')
        # cheap pre-filter on file basenames before the expensive metadata read
        log.info('filtering scenes by name')
        scenes = self.filter_scenelist(scene_in)
        if len(scenes) == 0:
            log.info('...nothing to be done')
            return
        log.info('identifying scenes and extracting metadata')
        scenes = identify_many(scenes, pbar=pbar)
        if len(scenes) == 0:
            log.info('all scenes are already registered')
            return
        counter_regulars = 0
        counter_duplicates = 0
        list_duplicates = []
        message = 'inserting {0} scene{1} into database'
        log.info(message.format(len(scenes), '' if len(scenes) == 1 else 's'))
        log.debug('testing changes in temporary database')
        if pbar:
            progress = pb.ProgressBar(max_value=len(scenes))
        else:
            progress = None
        insertions = []
        with self.Session() as session:
            for i, id in enumerate(scenes):
                basename = id.outname_base()
                # route each scene to 'data', 'duplicates' or skip it
                if not self.is_registered(id):
                    insertion = self.__prepare_insertion(id)
                    insertions.append(insertion)
                    counter_regulars += 1
                    log.debug('regular: {}'.format(id.scene))
                elif not self.__is_registered_in_duplicates(id):
                    insertion = self.Duplicates(outname_base=basename,
                                                scene=id.scene)
                    insertions.append(insertion)
                    counter_duplicates += 1
                    log.debug('duplicate: {}'.format(id.scene))
                else:
                    list_duplicates.append(id.outname_base())
                if progress is not None:
                    progress.update(i + 1)
            if progress is not None:
                progress.finish()
            session.add_all(insertions)
            if not test:
                log.debug('committing transactions to permanent database')
                # commit changes of the session
                session.commit()
            else:
                log.info('rolling back temporary database changes')
                # roll back changes of the session
                session.rollback()
        message = '{0} scene{1} registered regularly'
        log.info(message.format(counter_regulars, '' if counter_regulars == 1 else 's'))
        message = '{0} duplicate{1} registered'
        log.info(message.format(counter_duplicates, '' if counter_duplicates == 1 else 's'))
def is_registered(self, scene: str | ID) -> bool:
"""
Simple check if a scene is already registered in the database.
Parameters
----------
scene:
the SAR scene
Returns
-------
is the scene already registered?
"""
id = scene if isinstance(scene, ID) else identify(scene)
with self.Session() as session:
# ORM query, where scene equals id.scene, return first
exists_data = session.query(self.Data.outname_base).filter_by(
outname_base=id.outname_base(), product=id.product).first()
exists_duplicates = session.query(self.Duplicates.outname_base).filter(
self.Duplicates.outname_base == id.outname_base()).first()
in_data = False
in_dup = False
if exists_data:
in_data = len(exists_data) != 0
if exists_duplicates:
in_dup = len(exists_duplicates) != 0
return in_data or in_dup
def __is_registered_in_duplicates(self, scene: str | ID) -> bool:
"""
Simple check if a scene is already registered in the database.
Parameters
----------
scene:
the SAR scene
Returns
-------
is the scene already registered?
"""
id = scene if isinstance(scene, ID) else identify(scene)
with self.Session() as session:
# ORM query as in is registered
exists_duplicates = session.query(self.Duplicates.outname_base).filter(
self.Duplicates.outname_base == id.outname_base()).first()
in_dup = False
if exists_duplicates:
in_dup = len(exists_duplicates) != 0
return in_dup
def cleanup(self) -> None:
"""
Remove all scenes from the database, which are no longer stored in their registered location
"""
missing = self.__select_missing('data')
for scene in missing:
log.info('Removing missing scene from database tables: {}'.format(scene))
self.drop_element(scene, with_duplicates=True)
@staticmethod
def to_str(string: str | bytes, encoding: str = 'utf-8') -> str:
if isinstance(string, bytes):
return string.decode(encoding)
else:
return string
    def export2shp(self, path: str, table: str = 'data') -> None:
        """
        export the database to a shapefile

        Parameters
        ----------
        path:
            the path of the shapefile to be written.
            This will overwrite other files with the same name.
            If a folder is given in path it is created if not existing.
            If the file extension is missing '.shp' is added.
        table:
            the table to write to the shapefile; either 'data' (default) or 'duplicates'
        """
        if table not in self.get_tablenames():
            log.warning('Only data and duplicates can be exported!')
            return
        # add the .shp extension if missing
        if not path.endswith('.shp'):
            path += '.shp'
        # creates folder if not present, adds .shp if not within the path
        dirname = os.path.dirname(path)
        os.makedirs(dirname, exist_ok=True)
        # shorten long column names (presumably because of the shapefile
        # field-name length limit — confirm)
        launder_names = {'acquisition_mode': 'acq_mode',
                         'orbitNumber_abs': 'orbit_abs',
                         'orbitNumber_rel': 'orbit_rel',
                         'cycleNumber': 'cycleNr',
                         'frameNumber': 'frameNr',
                         'outname_base': 'outname'}
        sel_tables = ', '.join([f'"{s}" as {launder_names[s]}' if s in launder_names else s
                                for s in self.get_colnames(table)])
        # build the GDAL source dataset description for the respective driver
        if self.driver == 'sqlite':
            srcDS = self.dbfile
        elif self.driver == 'postgresql':
            srcDS = """PG:host={host} port={port} user={username}
                dbname={database} password={password} active_schema=public""".format(**self.url_dict)
        else:
            raise RuntimeError('unknown archive driver')
        gdal.VectorTranslate(destNameOrDestDS=path, srcDS=srcDS,
                             format='ESRI Shapefile',
                             SQLStatement=f'SELECT {sel_tables} FROM {table}',
                             SQLDialect=self.driver)
def filter_scenelist(self, scenelist: list[str | ID]) -> list[str | ID]:
"""
Filter a list of scenes by file names already registered in the database.
Parameters
----------
scenelist:
the scenes to be filtered
Returns
-------
The objects of `scenelist` for all scenes whose basename
is not yet registered in the database.
"""
for item in scenelist:
if not isinstance(item, (ID, str)):
raise TypeError("items in scenelist must be of type 'str' or 'pyroSAR.ID'")
with self.Session() as session:
# ORM query, get all scenes locations
scenes_data = session.query(self.Data.scene)
registered = [os.path.basename(self.to_str(x[0])) for x in scenes_data]
scenes_duplicates = session.query(self.Duplicates.scene)
duplicates = [os.path.basename(self.to_str(x[0])) for x in scenes_duplicates]
names = [item.scene if isinstance(item, ID) else item for item in scenelist]
filtered = [x for x, y in zip(scenelist, names)
if os.path.basename(y) not in registered + duplicates]
return filtered
    def get_colnames(self, table: str = 'data') -> list[str]:
        """
        Return the names of all columns of a table.

        Parameters
        ----------
        table:
            the name of the table to inspect; default: 'data'

        Returns
        -------
        the column names of the chosen table, sorted alphabetically
        """
        # get all columns of `table`, but shows geometry columns not correctly
        table_info = Table(table, self.meta, autoload=True, autoload_with=self.engine)
        col_names = table_info.c.keys()
        return sorted([self.to_str(x) for x in col_names])
def get_tablenames(self, return_all: bool = False) -> list[str]:
"""
Return the names of all tables in the database
Parameters
----------
return_all:
only gives tables data and duplicates on default.
Set to True to get all other tables and views created automatically.
Returns
-------
the table names
"""
# TODO: make this dynamic
# the method was intended to only return user generated tables by default, as well as data and duplicates
all_tables = ['ElementaryGeometries', 'SpatialIndex', 'geometry_columns', 'geometry_columns_auth',
'geometry_columns_field_infos', 'geometry_columns_statistics', 'geometry_columns_time',
'spatial_ref_sys', 'spatial_ref_sys_aux', 'spatialite_history', 'sql_statements_log',
'sqlite_sequence', 'views_geometry_columns', 'views_geometry_columns_auth',
'views_geometry_columns_field_infos', 'views_geometry_columns_statistics',
'virts_geometry_columns', 'virts_geometry_columns_auth', 'virts_geometry_columns_field_infos',
'virts_geometry_columns_statistics', 'data_licenses', 'KNN']
# get tablenames from metadata
tables = sorted([self.to_str(x) for x in self.meta.tables.keys()])
if return_all:
return tables
else:
ret = []
for i in tables:
if i not in all_tables and 'idx_' not in i:
ret.append(i)
return ret
def get_unique_directories(self) -> list[str]:
"""
Get a list of directories containing registered scenes
Returns
-------
the directory names
"""
with self.Session() as session:
# ORM query, get all directories
scenes = session.query(self.Data.scene)
registered = [os.path.dirname(self.to_str(x[0])) for x in scenes]
return list(set(registered))
def import_outdated(self, dbfile: str | Archive) -> None:
"""
import an older database
Parameters
----------
dbfile:
the old database. If this is a string, the name of a CSV file is expected.
"""
if isinstance(dbfile, str) and dbfile.endswith('csv'):
with open(dbfile) as csvfile:
text = csvfile.read()
csvfile.seek(0)
dialect = csv.Sniffer().sniff(text)
reader = csv.DictReader(csvfile, dialect=dialect)
scenes = []
for row in reader:
scenes.append(row['scene'])
self.insert(scenes)
elif isinstance(dbfile, Archive):
with self.engine.begin() as conn:
scenes = conn.exec_driver_sql('SELECT scene from data')
scenes = [s.scene for s in scenes]
self.insert(scenes)
reinsert = dbfile.select_duplicates(value='scene')
if reinsert is not None:
self.insert(reinsert)
else:
raise RuntimeError("'dbfile' must either be a CSV file name or an Archive object")
def move(self, scenelist: list[str], directory: str, pbar: bool = False) -> None:
"""
Move a list of files while keeping the database entries up to date.
If a scene is registered in the database (in either the data or duplicates table),
the scene entry is directly changed to the new location.
Parameters
----------
scenelist:
the file locations
directory:
a folder to which the files are moved
pbar:
show a progress bar?
"""
if not os.path.isdir(directory):
os.mkdir(directory)
if not os.access(directory, os.W_OK):
raise RuntimeError('directory cannot be written to')
failed = []
double = []
if pbar:
progress = pb.ProgressBar(max_value=len(scenelist)).start()
else:
progress = None
for i, scene in enumerate(scenelist):
new = os.path.join(directory, os.path.basename(scene))
if os.path.isfile(new):
double.append(new)
continue
try:
shutil.move(scene, directory)
except shutil.Error:
failed.append(scene)
continue
finally:
if progress is not None:
progress.update(i + 1)
if self.select(scene=scene) != 0:
table = 'data'
else:
# using core connection to execute SQL syntax (as was before)
query = '''SELECT scene FROM duplicates WHERE scene='{0}' '''.format(scene)
with self.engine.begin() as conn:
query_duplicates = conn.exec_driver_sql(query)
if len(query_duplicates) != 0:
table = 'duplicates'
else:
table = None
if table:
# using core connection to execute SQL syntax (as was before)
query = '''UPDATE {0} SET scene= '{1}' WHERE scene='{2}' '''.format(table, new, scene)
with self.engine.begin() as conn:
conn.exec_driver_sql(query)
if progress is not None:
progress.finish()
if len(failed) > 0:
log.info('The following scenes could not be moved:\n{}'.format('\n'.join(failed)))
if len(double) > 0:
log.info('The following scenes already exist at the target location:\n{}'.format('\n'.join(double)))
def select(
self,
sensor: str | list[str] | None = None,
product: str | list[str] | None = None,
acquisition_mode: str | list[str] | None = None,
mindate: str | datetime | None = None,
maxdate: str | datetime | None = None,
vectorobject: Vector | None = None,
date_strict: bool = True,
processdir: str | None = None,
recursive: bool = False,
polarizations: list[str] | None = None,
return_value: str | list[str] = "scene",
**kwargs: Any
) -> list[str | bytes] | list[tuple[str | bytes]]:
"""
select scenes from the database
Parameters
----------
sensor:
the satellite sensor(s)
product:
the product type(s)
acquisition_mode:
the sensor's acquisition mode(s)
mindate:
the minimum acquisition date; strings must be in format YYYYmmddTHHMMSS; default: None
maxdate:
the maximum acquisition date; strings must be in format YYYYmmddTHHMMSS; default: None
vectorobject:
a geometry with which the scenes need to overlap. The object may only contain one feature.
date_strict:
treat dates as strict limits or also allow flexible limits to incorporate scenes
whose acquisition period overlaps with the defined limit?
- strict: start >= mindate & stop <= maxdate
- not strict: stop >= mindate & start <= maxdate
processdir:
A directory to be scanned for already processed scenes;
the selected scenes will be filtered to those that have not yet been processed. Default: None
recursive:
(only if `processdir` is not None) should also the subdirectories of the `processdir` be scanned?
polarizations:
a list of polarization strings, e.g. ['HH', 'VV']
return_value:
the query return value(s). Options:
- `geometry_wkb`: the scene's footprint geometry formatted as WKB
- `geometry_wkt`: the scene's footprint geometry formatted as WKT
- `mindate`: the acquisition start datetime in UTC formatted as YYYYmmddTHHMMSS
- `maxdate`: the acquisition end datetime in UTC formatted as YYYYmmddTHHMMSS
- all further database column names (see :meth:`~Archive.get_colnames()`)
**kwargs:
any further arguments (columns), which are registered in the database. See :meth:`~Archive.get_colnames()`
Returns
-------
If a single return_value is specified: list of values for that attribute.
If multiple return_values are specified: list of tuples containing the requested attributes.
The return value type is bytes for `geometry_wkb` and str for all others.
"""
# Convert return_value to list if it's a string
if isinstance(return_value, str):
return_values = [return_value]
else:
return_values = return_value
return_values_sql = []
for val in return_values:
if val == 'mindate':
return_values_sql.append('start')
elif val == 'maxdate':
return_values_sql.append('stop')
elif val == 'geometry_wkt':
prefix = 'ST_' if self.driver == 'postgresql' else ''
return_values_sql.append(f'{prefix}AsText(geometry) as geometry_wkt')
elif val == 'geometry_wkb':
prefix = 'ST_' if self.driver == 'postgresql' else ''
return_values_sql.append(f'{prefix}AsBinary(geometry) as geometry_wkb')
else:
return_values_sql.append(val)
# Validate that all requested return values exist in the database
valid_columns = self.get_colnames()
extra = ['mindate', 'maxdate', 'geometry_wkt', 'geometry_wkb']
normal_returns = [x for x in return_values if x not in extra]
invalid_returns = [x for x in normal_returns if x not in valid_columns]
if invalid_returns:
invalid_str = ', '.join(invalid_returns)
msg = (f"The following options are not supported as "
f"return values: {invalid_str}")
raise ValueError(msg)
arg_valid = [x for x in kwargs.keys() if x in self.get_colnames()]
arg_invalid = [x for x in kwargs.keys() if x not in self.get_colnames()]
if len(arg_invalid) > 0:
log.info(f"the following arguments will be ignored as they are not "
f"registered in the data base: {', '.join(arg_invalid)}")
def convert_general(k: str, v: Any) -> str:
if isinstance(v, (float, int, str)):
return f"""{k}='{v}'"""
elif isinstance(v, (tuple, list)):
v_str = "', '".join(map(str, v))
return f"""{k} IN ('{v_str}')"""
else:
raise TypeError(f"unsupported type for '{k}': {type(v)}")
arg_format = []
vals = []
for key in arg_valid:
if key == 'scene':
arg_format.append('''scene LIKE '%%{0}%%' '''.format(os.path.basename(kwargs[key])))
else:
arg_format.append(convert_general(key, kwargs[key]))
if sensor:
arg_format.append(convert_general('sensor', sensor))
if product:
arg_format.append(convert_general('product', product))
if acquisition_mode:
arg_format.append(convert_general('acquisition_mode', acquisition_mode))
if mindate:
if isinstance(mindate, datetime):
mindate = mindate.strftime('%Y%m%dT%H%M%S')
if re.search('[0-9]{8}T[0-9]{6}', mindate):
if date_strict:
arg_format.append('start>=?')
else:
arg_format.append('stop>=?')
vals.append(mindate)
else:
log.info('WARNING: argument mindate is ignored, must be in format YYYYmmddTHHMMSS')
if maxdate:
if isinstance(maxdate, datetime):
maxdate = maxdate.strftime('%Y%m%dT%H%M%S')
if re.search('[0-9]{8}T[0-9]{6}', maxdate):
if date_strict:
arg_format.append('stop<=?')
else:
arg_format.append('start<=?')
vals.append(maxdate)
else:
log.info('WARNING: argument maxdate is ignored, must be in format YYYYmmddTHHMMSS')
if polarizations:
for pol in polarizations:
if pol in ['HH', 'VV', 'HV', 'VH']:
arg_format.append('{}=1'.format(pol.lower()))
if vectorobject:
if isinstance(vectorobject, Vector):
if vectorobject.nfeatures > 1:
raise RuntimeError("'vectorobject' contains more than one feature.")
with vectorobject.clone() as vec:
vec.reproject(4326)
site_geom = vec.convert2wkt(set3D=False)[0]
# postgres has a different way to store geometries
if self.driver == 'postgresql':
statement = f"st_intersects(geometry, 'SRID=4326; {site_geom}')"
arg_format.append(statement)
else:
arg_format.append('st_intersects(GeomFromText(?, 4326), geometry) = 1')
vals.append(site_geom)
else:
log.info('WARNING: argument vectorobject is ignored, must be of type spatialist.vector.Vector')
if len(arg_format) > 0:
subquery = ' WHERE {}'.format(' AND '.join(arg_format))
else:
subquery = ''
# Modify the query to select the requested return values
query = 'SELECT {}, outname_base FROM data{}'.format(', '.join(return_values_sql), subquery)
# the query gets assembled stepwise here
for val in vals:
query = query.replace('?', """'{0}'""", 1).format(val)
log.debug(query)
# core SQL execution
with self.engine.begin() as conn:
query_rs = conn.exec_driver_sql(query)
if processdir and os.path.isdir(processdir):
scenes = [x for x in query_rs
if len(finder(processdir, [x[-1]],
regex=True, recursive=recursive)) == 0]
else:
scenes = query_rs
ret = []
for x in scenes:
# If only one return value was requested, append just that value
if len(return_values) == 1:
ret.append(self.to_str(x[0]))
else:
# If multiple return values were requested, append a tuple of all values
values = []
for k, v in zip(return_values, x[:-1]): # Exclude outname_base
if k == 'geometry_wkb':
values.append(v)
else:
values.append(self.to_str(v))
ret.append(tuple(values))
return ret
def select_duplicates(
        self,
        outname_base: str | None = None,
        scene: str | None = None,
        value: Literal["id", "scene"] = "id"
) -> list[str]:
    """
    Select scenes from the duplicates table. In case both `outname_base` and `scene` are set to None all scenes in
    the table are returned, otherwise only those that match the attributes `outname_base` and `scene` if they are not None.
    
    Parameters
    ----------
    outname_base:
        the basename of the scene
    scene:
        the scene name
    value:
        the return value; either 'id' or 'scene'
    
    Returns
    -------
    the selected scene(s)
    
    Raises
    ------
    ValueError
        if `value` is neither 'id' nor 'scene'
    """
    # map the requested return value to its column index in the duplicates table
    if value == 'id':
        key = 0
    elif value == 'scene':
        key = 1
    else:
        # fix: the message previously referred to the internal column indices
        # (0/1) instead of the documented options 'id' and 'scene'
        raise ValueError("argument 'value' must be either 'id' or 'scene'")
    with self.engine.begin() as conn:
        if not outname_base and not scene:
            # core SQL execution
            scenes = conn.exec_driver_sql('SELECT * from duplicates')
        else:
            cond = []
            arg = []
            if outname_base:
                cond.append('outname_base=?')
                arg.append(outname_base)
            if scene:
                cond.append('scene=?')
                arg.append(scene)
            query = 'SELECT * from duplicates WHERE {}'.format(' AND '.join(cond))
            # substitute the '?' placeholders stepwise with quoted values
            for a in arg:
                query = query.replace('?', ''' '{0}' ''', 1).format(a)
            # core SQL execution
            scenes = conn.exec_driver_sql(query)
        ret = []
        for x in scenes:
            ret.append(self.to_str(x[key]))
    return ret
@property
def size(self) -> tuple[int, int]:
    """
    get the number of scenes registered in the database
    
    Returns
    -------
    the number of scenes in (1) the main table and (2) the duplicates table
    """
    # count rows of both tables via ORM queries within a single session
    with self.Session() as session:
        n_data = session.query(self.Data.outname_base).count()
        n_duplicates = session.query(self.Duplicates.outname_base).count()
    return n_data, n_duplicates
def __enter__(self) -> Archive:
    # context manager entry; the database connection is already established,
    # so there is nothing to acquire here
    return self
def close(self) -> None:
    """
    close the database connection by disposing of the engine's connection pool
    """
    self.engine.dispose()
    gc.collect(generation=2)  # this was added as a fix for win PermissionError when deleting sqlite.db files.
def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None
) -> None:
    # context manager exit: close the connection; returning None means
    # exceptions raised inside the 'with' block are not suppressed
    self.close()
def drop_element(
        self,
        scene: str,
        with_duplicates: bool = False
) -> None:
    """
    Drop a scene from the data table.
    If the duplicates table contains a matching entry, it will be moved to the data table.
    
    Parameters
    ----------
    scene:
        a SAR scene
    with_duplicates:
        True: delete matching entry in duplicates table
        False: move matching entry from duplicates into data table
    """
    # save outname_base from the entry that is to be deleted
    search = self.data_schema.select().where(self.data_schema.c.scene == scene)
    entry_data_outname_base = []
    with self.engine.begin() as conn:
        for rowproxy in conn.execute(search):
            # column index 12 holds outname_base in the data table
            entry_data_outname_base.append(rowproxy[12])
    # fix: previously an unregistered scene caused an IndexError further down
    # when entry_data_outname_base[0] was accessed
    if len(entry_data_outname_base) == 0:
        log.info('scene does not exist in the data table: \n{}'.format(scene))
        return
    # delete entry in data table
    delete_statement = self.data_schema.delete().where(self.data_schema.c.scene == scene)
    with self.engine.begin() as conn:
        conn.execute(delete_statement)
    return_sentence = 'Entry with scene-id: \n{} \nwas dropped from data'.format(scene)
    # with_duplicates == True, delete entry from duplicates
    if with_duplicates:
        delete_statement_dup = self.duplicates_schema.delete().where(
            self.duplicates_schema.c.outname_base == entry_data_outname_base[0])
        with self.engine.begin() as conn:
            conn.execute(delete_statement_dup)
        # fix: dropped a no-op .format(scene) on a string without placeholders
        log.info(return_sentence + ' and duplicates!')
        return
    # else select scene info matching outname_base from duplicates
    select_in_duplicates_statement = self.duplicates_schema.select().where(
        self.duplicates_schema.c.outname_base == entry_data_outname_base[0])
    entry_duplicates_scene = []
    with self.engine.begin() as conn:
        for rowproxy in conn.execute(select_in_duplicates_statement):
            entry_duplicates_scene.append(rowproxy[1])
    # check if there is a duplicate
    if len(entry_duplicates_scene) == 1:
        # remove entry from duplicates
        delete_statement_dup = self.duplicates_schema.delete().where(
            self.duplicates_schema.c.outname_base == entry_data_outname_base[0])
        with self.engine.begin() as conn:
            conn.execute(delete_statement_dup)
        # insert scene from duplicates into data
        self.insert(entry_duplicates_scene[0])
        return_sentence += ' and entry with outname_base \n{} \nand scene \n{} \n' \
                           'was moved from duplicates into data table'.format(
            entry_data_outname_base[0], entry_duplicates_scene[0])
    log.info(return_sentence + '!')
def drop_table(self, table: str) -> None:
    """
    Drop a table from the database.
    
    Parameters
    ----------
    table:
        the table name
    
    Raises
    ------
    ValueError
        if the table is not registered in the database
    """
    if table in self.get_tablenames(return_all=True):
        # this removes the idx tables and entries in geometry_columns for sqlite databases
        if self.driver == 'sqlite':
            with self.engine.begin() as conn:
                query = "SELECT f_table_name FROM geometry_columns"
                tab_with_geom = [rowproxy[0] for rowproxy
                                 in conn.exec_driver_sql(query)]
                if table in tab_with_geom:
                    # NOTE(review): sqlite tables without a geometry column are
                    # not dropped by this branch — confirm this is intended
                    conn.exec_driver_sql("SELECT DropGeoTable('" + table + "')")
        else:
            # non-sqlite (postgresql): reflect the table and drop it via SQLAlchemy
            table_info = Table(table, self.meta, autoload=True, autoload_with=self.engine)
            table_info.drop(self.engine)
        log.info('table {} dropped from database.'.format(table))
    else:
        raise ValueError("table {} is not registered in the database!".format(table))
    # rebuild the ORM mapping so the dropped table disappears from the automap
    self.Base = automap_base(metadata=self.meta)
    self.Base.prepare(self.engine, reflect=True)
@staticmethod
def __is_open(ip: str, port: str | int) -> bool:
    """
    Checks server connection, from Ben Curtis (github: Fmstrat)
    
    Parameters
    ----------
    ip:
        ip of the server
    port:
        port of the server
    
    Returns
    -------
    is the server reachable?
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(3)
    try:
        s.connect((ip, int(port)))
        s.shutdown(socket.SHUT_RDWR)
        return True
    except (OSError, ValueError):
        # fix: the previous bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; only connection failures (OSError, which includes
        # socket.timeout) and invalid port strings (ValueError) mean 'not open'
        return False
    finally:
        s.close()
def __check_host(self, ip: str, port: str | int) -> bool:
    """
    Calls __is_open() on ip and port, from Ben Curtis (github: Fmstrat).
    Retries once after a 5 second delay.
    
    Parameters
    ----------
    ip:
        ip of the server
    port:
        port of the server
    
    Returns
    -------
    is the server reachable?
    """
    attempts = 2
    for i in range(attempts):
        if self.__is_open(ip, port):
            return True
        # fix: previously the 5 s delay was also executed after the final
        # failed attempt; only sleep when another attempt follows
        if i < attempts - 1:
            time.sleep(5)
    return False
def drop_archive(archive: "Archive") -> None:
    """
    drop (delete) a scene database
    
    Parameters
    ----------
    archive:
        the database to be deleted
    
    Raises
    ------
    RuntimeError
        if the database is not a PostgreSQL database
    
    See Also
    --------
    :func:`sqlalchemy_utils.functions.drop_database()`
    
    Examples
    --------
    >>> pguser = os.environ.get('PGUSER')
    >>> pgpassword = os.environ.get('PGPASSWORD')
    >>> db = Archive('test', postgres=True, port=5432, user=pguser, password=pgpassword)
    >>> drop_archive(db)
    """
    if archive.driver == 'postgresql':
        url = archive.url
        # close the connection before dropping the database it points to
        archive.close()
        drop_database(url)
    else:
        # fix: the two message parts were previously concatenated without a
        # separating space, yielding 'databases.For SQLite'
        raise RuntimeError('this function only works for PostgreSQL databases. '
                           'For SQLite databases it is recommended to just delete the DB file.')
================================================
FILE: pyroSAR/auxdata.py
================================================
###############################################################################
# tools for handling auxiliary data in software pyroSAR
# Copyright (c) 2019-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
import os
import re
import csv
import ssl
import json
import numpy
import fnmatch
import ftplib
import requests
import zipfile as zf
from lxml import etree
from math import ceil, floor
from urllib.parse import urlparse
from collections import defaultdict
from packaging import version
from pyroSAR.examine import ExamineSnap
from pyroSAR.ancillary import Lock
from spatialist.raster import Raster, Dtype
from spatialist.vector import bbox
from spatialist.ancillary import dissolve, finder
from spatialist.auxil import gdalbuildvrt, crsConvert, gdalwarp
from spatialist.envi import HDRobject
from osgeo import gdal
import logging
log = logging.getLogger(__name__)
def dem_autoload(geometries, demType, vrt=None, buffer=None, username=None,
                 password=None, product='dem', crop=True, lock_timeout=600,
                 offline=False):
    """
    obtain all relevant DEM tiles for selected geometries and optionally mosaic them in a VRT.
    
    Parameters
    ----------
    geometries: list[spatialist.vector.Vector] or None
        a list of :class:`spatialist.vector.Vector` geometries to obtain DEM data for;
        CRS must be WGS84 LatLon (EPSG 4326). Can be set to None for global extent.
    demType: str
        the type of DEM to be used; current options:
        
        - 'AW3D30' (ALOS Global Digital Surface Model "ALOS World 3D - 30m")
          
          * info: https://www.eorc.jaxa.jp/ALOS/en/aw3d30/index.htm
          * url: ftp://ftp.eorc.jaxa.jp/pub/ALOS/ext1/AW3D30/release_v1804
          * height reference: EGM96
        
        - 'Copernicus 10m EEA DEM' (Copernicus 10 m DEM available over EEA-39 countries)
          
          * registration: https://spacedata.copernicus.eu/web/cscda/data-access/registration
          * url: ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_EEA-10-DGED/2021_1
          * height reference: EGM2008
        
        - 'Copernicus 30m Global DEM'
          
          * info: https://registry.opendata.aws/copernicus-dem
          * url: https://copernicus-dem-30m-stac.s3.amazonaws.com
          * height reference: EGM2008
        
        - 'Copernicus 30m Global DEM II'
          
          * registration: https://spacedata.copernicus.eu/web/cscda/data-access/registration
          * url: ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-30-DGED/2021_1
          * height reference: EGM2008
        
        - 'Copernicus 90m Global DEM'
          
          * info: https://registry.opendata.aws/copernicus-dem
          * url: https://copernicus-dem-90m-stac.s3.amazonaws.com
          * height reference: EGM2008
        
        - 'Copernicus 90m Global DEM II'
          
          * registration: https://spacedata.copernicus.eu/web/cscda/data-access/registration
          * url: ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-90-DGED/2021_1
          * height reference: EGM2008
        
        - 'GETASSE30'
          
          * info: https://seadas.gsfc.nasa.gov/help-8.1.0/desktop/GETASSE30ElevationModel.html
          * url: https://step.esa.int/auxdata/dem/GETASSE30
          * height reference: WGS84
        
        - 'SRTM 1Sec HGT'
          
          * url: https://step.esa.int/auxdata/dem/SRTMGL1
          * height reference: EGM96
        
        - 'SRTM 3Sec'
          
          * url: https://step.esa.int/auxdata/dem/SRTM90/tiff
          * height reference: EGM96
    vrt: str or None
        an optional GDAL VRT file created from the obtained DEM tiles
    buffer: int, float, None
        a buffer in degrees to add around the individual geometries
    username: str or None
        (optional) the username for services requiring registration
    password: str or None
        (optional) the password for the registration account
    product: str
        the sub-product to extract from the DEM product.
        The following options are available for the respective DEM types:
        
        - 'AW3D30'
          
          * 'dem': the actual Digital Elevation Model
          * 'msk': mask information for each pixel (Cloud/Snow Mask, Land water and
            low correlation mask, Sea mask, Information of elevation dataset used
            for the void-filling processing)
          * 'stk': number of DSM-scene files which were used to produce the 5 m resolution DSM
        
        - 'Copernicus 10m EEA DEM'
          
          * 'dem': the actual Digital Elevation Model
          * 'edm': editing mask
          * 'flm': filling mask
          * 'hem': height error mask
          * 'wbm': water body mask
        
        - 'Copernicus 30m Global DEM'
          
          * 'dem': the actual Digital Elevation Model
          * 'edm': Editing Mask
          * 'flm': Filling Mask
          * 'hem': Height Error Mask
          * 'wbm': Water Body Mask
        
        - 'Copernicus 30m Global DEM II'
          
          * 'dem': the actual Digital Elevation Model
          * 'edm': editing mask
          * 'flm': filling mask
          * 'hem': height error mask
          * 'wbm': water body mask
        
        - 'Copernicus 90m Global DEM'
          
          * 'dem': the actual Digital Elevation Model
          * 'edm': Editing Mask
          * 'flm': Filling Mask
          * 'hem': Height Error Mask
          * 'wbm': Water Body Mask
        
        - 'Copernicus 90m Global DEM II'
          
          * 'dem': the actual Digital Elevation Model
          * 'edm': editing mask
          * 'flm': filling mask
          * 'hem': height error mask
          * 'wbm': water body mask
        
        - 'GETASSE30'
          
          * 'dem': the actual Digital Elevation Model
        
        - 'SRTM 1Sec HGT'
          
          * 'dem': the actual Digital Elevation Model
        
        - 'SRTM 3Sec'
          
          * 'dem': the actual Digital Elevation Model
    crop: bool
        crop to the provided geometries (or return the full extent of the DEM tiles)?
    lock_timeout: int
        how long to wait to acquire a lock on the downloaded files?
    offline: bool
        work offline? If `True`, only locally existing files are considered
        and no online check is performed. If a file is missing, an error is
        raised. For this to work, the function needs to be run in `online`
        mode once to create a local index.
    
    Returns
    -------
    list[str] or None
        the names of the obtained files or None if a VRT file was defined
    
    Examples
    --------
    download all SRTM 1 arcsec DEMs overlapping with a Sentinel-1 scene and mosaic them to a single GeoTIFF file
    
    .. code-block:: python
        
        from pyroSAR import identify
        from pyroSAR.auxdata import dem_autoload
        from spatialist import gdalwarp
        
        # identify the SAR scene
        filename = 'S1A_IW_SLC__1SDV_20150330T170734_20150330T170801_005264_006A6C_DA69.zip'
        scene = identify(filename)
        
        # extract the bounding box as spatialist.Vector object
        bbox = scene.bbox()
        
        # download the tiles and virtually combine them in an in-memory
        # VRT file subsetted to the extent of the SAR scene plus a buffer of 0.01 degrees
        vrt = '/vsimem/srtm1.vrt'
        dem_autoload(geometries=[bbox], demType='SRTM 1Sec HGT',
                     vrt=vrt, buffer=0.01)
        
        # write the final GeoTIFF file
        outname = scene.outname_base() + 'srtm1.tif'
        gdalwarp(src=vrt, dst=outname, options={'format': 'GTiff'})
        
        # alternatively use function dem_create and warp the DEM to UTM
        # including conversion from geoid to ellipsoid heights
        from pyroSAR.auxdata import dem_create
        outname = scene.outname_base() + 'srtm1_ellp.tif'
        dem_create(src=vrt, dst=outname, t_srs=32632, tr=(30, 30),
                   geoid_convert=True, geoid='EGM96')
    """
    # collect all loader options; the heavy lifting is delegated to DEMHandler
    load_args = dict(dem_type=demType,
                     username=username,
                     password=password,
                     vrt=vrt,
                     buffer=buffer,
                     product=product,
                     crop=crop,
                     lock_timeout=lock_timeout,
                     offline=offline)
    with DEMHandler(geometries) as handler:
        return handler.load(**load_args)
def dem_create(src, dst, t_srs=None, tr=None, threads=None,
               geoid_convert=False, geoid='EGM96', nodata=None,
               resampleAlg='bilinear', dtype=None, pbar=False,
               **kwargs):
    """
    Create a new DEM GeoTIFF file and optionally convert heights from geoid to ellipsoid.
    This is basically a convenience wrapper around :func:`osgeo.gdal.Warp` via :func:`spatialist.auxil.gdalwarp`.
    The following argument defaults deviate from those of :func:`osgeo.gdal.WarpOptions`:
    
    - `format` is set to 'GTiff'
    - `resampleAlg` is set to 'bilinear'
    - `targetAlignedPixels` is set to 'True'
    
    Parameters
    ----------
    src: str
        the input dataset, e.g. a VRT from function :func:`dem_autoload`
    dst: str
        the output dataset
    t_srs: None, int, str or osgeo.osr.SpatialReference
        A target geographic reference system in WKT, EPSG, PROJ4 or OPENGIS format.
        See function :func:`spatialist.auxil.crsConvert()` for details.
        Default (None): use the crs of ``src``.
    tr: None or tuple[int or float]
        the target resolution as (xres, yres)
    threads: int, str or None
        the number of threads to use. Possible values:
        
        - Default `None`: use the value of `GDAL_NUM_THREADS` without modification. If `GDAL_NUM_THREADS` is None,
          multi-threading is still turned on and two threads are used, one for I/O and one for computation.
        - integer value: temporarily modify `GDAL_NUM_THREADS` and reset it once done.
          If 1, multithreading is turned off.
        - `ALL_CPUS`: special string to use all cores/CPUs of the computer; will also temporarily
          modify `GDAL_NUM_THREADS`.
    geoid_convert: bool
        convert geoid heights?
    geoid: str
        the geoid model to be corrected, only used if ``geoid_convert == True``; current options:
        
        - 'EGM96'
        - 'EGM2008'
    nodata: int or float or str or None
        the no data value of the source and destination files.
        Can be used if no source nodata value can be read or to override it.
        A special string 'None' can be used to skip reading the value from the source file.
    resampleAlg: str
        the resampling algorithm to be used. See here for options:
        https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r
    dtype: str or None
        override the data type of the written file; Default None: use same type as source data.
        Data type notations of GDAL (e.g. `Float32`) and numpy (e.g. `int8`) are supported.
        See :class:`spatialist.raster.Dtype`.
    pbar: bool
        add a progressbar?
    **kwargs
        additional keyword arguments to be passed to :func:`spatialist.auxil.gdalwarp`.
        See :func:`osgeo.gdal.WarpOptions` for options. The following arguments cannot
        be set as they are controlled internally:
        
        - `xRes`, `yRes`: controlled via argument `tr`
        - `srcSRS`, `dstSRS`: controlled via the CRS of `src` and arguments `t_srs`, `geoid`, `geoid_convert`
        - `srcNodata`, `dstNodata`: controlled via argument `nodata`
        - `outputType`: controlled via argument `dtype`
        - `multithread` controlled via argument `threads`
    
    Returns
    -------
    """
    vrt_check_sources(src)
    # read defaults (nodata, resolution, CRS) from the source dataset
    with Raster(src) as ras:
        if nodata is None:
            nodata = ras.nodata
        if tr is None:
            tr = ras.res
        epsg_in = ras.epsg
    if t_srs is None:
        epsg_out = epsg_in
    else:
        epsg_out = crsConvert(t_srs, 'epsg')
    # remember the system-wide setting so it can be restored in the 'finally' block
    threads_system = gdal.GetConfigOption('GDAL_NUM_THREADS')
    if threads is None:
        threads = threads_system
    # GDAL_NUM_THREADS may hold a numeric string; normalize it to int if possible
    try:
        threads = int(threads)
    except (ValueError, TypeError):
        pass
    if isinstance(threads, str):
        if threads != 'ALL_CPUS':
            raise ValueError("unsupported value for 'threads': '{}'".format(threads))
        else:
            multithread = True
            gdal.SetConfigOption('GDAL_NUM_THREADS', threads)
    elif isinstance(threads, int):
        if threads == 1:
            multithread = False
        elif threads > 1:
            multithread = True
            gdal.SetConfigOption('GDAL_NUM_THREADS', str(threads))
        else:
            raise ValueError("if 'threads' is of type int, it must be >= 1")
    elif threads is None:
        # reached when GDAL_NUM_THREADS itself is unset
        multithread = True
    else:
        raise TypeError("'threads' must be of type int, str or None. Is: {}".format(type(threads)))
    gdalwarp_args = {'format': 'GTiff', 'multithread': multithread,
                     'srcNodata': nodata, 'dstNodata': nodata,
                     'srcSRS': 'EPSG:{}'.format(epsg_in),
                     'dstSRS': 'EPSG:{}'.format(epsg_out),
                     'resampleAlg': resampleAlg,
                     'xRes': tr[0], 'yRes': tr[1],
                     'targetAlignedPixels': True}
    if dtype is not None:
        gdalwarp_args['outputType'] = Dtype(dtype).gdalint
    if geoid_convert:
        geoid_epsg = {'EGM96': 5773,
                      'EGM2008': 3855}
        if geoid in geoid_epsg.keys():
            epsg = geoid_epsg[geoid]
            # compound CRS: horizontal EPSG plus vertical (geoid) EPSG
            gdalwarp_args['srcSRS'] += '+{}'.format(epsg)
            # the following line is a workaround for older GDAL versions that did not
            # support compound EPSG codes. See https://github.com/OSGeo/gdal/pull/4639.
            if version.parse(gdal.__version__) < version.parse('3.4.0'):
                gdalwarp_args['srcSRS'] = crsConvert(gdalwarp_args['srcSRS'], 'proj4')
        else:
            raise RuntimeError('geoid model not yet supported')
        try:
            get_egm_lookup(geoid=geoid, software='PROJ')
        except OSError as e:
            errstr = str(e)
            raise RuntimeError(errstr)
    # arguments that are controlled internally and must not be overridden
    locked = ['xRes', 'yRes', 'srcSRS', 'dstSRS', 'srcNodata',
              'dstNodata', 'outputType', 'multithread']
    for key, val in kwargs.items():
        if key not in locked:
            gdalwarp_args[key] = val
        else:
            msg = "argument '{}' cannot be set via kwargs as it is set internally."
            raise RuntimeError(msg.format(key))
    try:
        if not os.path.isfile(dst):
            message = 'creating mosaic'
            crs = gdalwarp_args['dstSRS']
            if crs != 'EPSG:4326':
                message += ' and reprojecting to {}'.format(crs)
            log.info(f'{message}: {dst}')
            gdalwarp(src=src, dst=dst, pbar=pbar, **gdalwarp_args)
        else:
            log.info(f'mosaic already exists: {dst}')
    except Exception:
        # remove a partially written output so a re-run starts clean
        if os.path.isfile(dst):
            os.remove(dst)
        raise
    finally:
        # restore the system-wide GDAL threading configuration
        gdal.SetConfigOption('GDAL_NUM_THREADS', threads_system)
class DEMHandler:
"""
An interface to obtain DEM data for selected geometries.
The files are downloaded into the ESA SNAP auxiliary data directory structure.
This class is the foundation for the convenience function :func:`~pyroSAR.auxdata.dem_autoload`.
Parameters
----------
geometries: list[spatialist.vector.Vector] or None
a list of geometries
"""
def __init__(self, geometries):
    """
    Validate the input geometries and determine the SNAP auxdata directory.
    
    Parameters
    ----------
    geometries: list[spatialist.vector.Vector] or None
        a list of geometries in EPSG 4326, or None for global extent
    """
    # accept only a list of geometries or None (global extent)
    if geometries is not None and not isinstance(geometries, list):
        raise RuntimeError('geometries must be of type list')
    if geometries is not None:
        for geom in geometries:
            if geom.getProjection('epsg') != 4326:
                raise RuntimeError('input geometry CRS must be WGS84 LatLon (EPSG 4326)')
    self.geometries = geometries
    # fall back to the default SNAP auxdata location if SNAP cannot be examined
    try:
        self.auxdatapath = ExamineSnap().auxdatapath
    except AttributeError:
        self.auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')
def __enter__(self):
    # context manager entry; no resources are acquired here
    return self
def __exit__(self, exc_type, exc_val, exc_tb):
    # nothing to clean up; returning None does not suppress exceptions
    return
@staticmethod
def __applybuffer(extent, buffer):
    """
    Return a copy of `extent` expanded by `buffer` degrees on all sides.
    
    Parameters
    ----------
    extent: dict
        a dictionary with keys 'xmin', 'ymin', 'xmax', 'ymax'
    buffer: int or float or None
        the buffer in degrees; None leaves the extent unchanged
    
    Returns
    -------
    dict
        the (possibly buffered) extent copy
    """
    ext = dict(extent)
    if buffer is None:
        return ext
    for key in ('xmin', 'ymin'):
        ext[key] -= buffer
    for key in ('xmax', 'ymax'):
        ext[key] += buffer
    return ext
def __find_first(self, dem_type, product):
    """
    Search the local DEM directory tree for the first file matching the
    product's filename pattern.
    
    Walks <auxdatapath>/dem/<dem_type>. For DEM types whose tiles are stored
    in archives (config 'vsi' is set), archives are searched internally and
    the match is returned with the VSI prefix. Returns None (implicitly) if
    nothing is found.
    """
    outdir = os.path.join(self.auxdatapath, 'dem', dem_type)
    vsi = self.config[dem_type]['vsi']
    # translate the shell-style pattern to a regex for re.search/finder
    pattern = fnmatch.translate(self.config[dem_type]['pattern'][product])
    for root, dirs, files in os.walk(outdir):
        for file in files:
            if vsi is None:
                # tiles are plain files; match the file name directly
                if re.search(pattern, file):
                    return os.path.join(root, file)
            else:
                # tiles are inside zip/tar(.gz) archives; search their content
                if re.search(r'\.(?:zip|tar(\.gz)?)$', file):
                    fname = os.path.join(root, file)
                    content = finder(fname, [pattern], regex=True)
                    if len(content) > 0:
                        if dem_type == 'GETASSE30':
                            # presumably writes an ENVI header for the tile so
                            # GDAL can read it — see getasse30_hdr; TODO confirm
                            getasse30_hdr(fname)
                        return vsi + content[0]
@staticmethod
def __buildvrt(tiles, vrtfile, pattern, vsi, extent, src_nodata=None,
               dst_nodata=None, hide_nodata=False, resolution=None,
               tap=True, dst_datatype=None):
    """
    Build a VRT mosaic from DEM tiles. The VRT is cropped to the specified `extent` but the pixel grid
    of the source files is preserved and no resampling/shifting is applied.
    
    Parameters
    ----------
    tiles: list[str]
        a list of DEM files or compressed archives containing DEM files
    vrtfile: str
        the output VRT filename
    pattern: str
        the search pattern for finding DEM tiles in compressed archives
    vsi: str or None
        the GDAL VSI directive to prepend the DEM tile name, e.g. /vsizip/ or /vsitar/
    extent: dict
        a dictionary with keys `xmin`, `ymin`, `xmax` and `ymax`
    src_nodata: int or float or None
        the nodata value of the source DEM tiles; default None: read the value from the first item in `tiles`
    dst_nodata: int or float or None
        the nodata value of the output VRT file.
        Default None: do not define a nodata value and use `src_nodata` instead.
    hide_nodata: bool
        hide the nodata value of the output VRT file?
    resolution: tuple[int or float] or None
        the spatial resolution (X, Y) of the source DEM tiles.
        Default None: read the value from the first item in `tiles`
    tap: bool
        align target pixels?
    dst_datatype: int or str or None
        the VRT data type as supported by :class:`spatialist.raster.Dtype`.
        Default None: use the same data type as the source files.
    
    Returns
    -------
    """
    # resolve the actual raster sources: archives need the VSI prefix and an
    # internal search, plain GeoTIFF tiles are used directly
    if vsi is not None and not tiles[0].endswith('.tif'):
        matches = dissolve([finder(x, [pattern]) for x in tiles])
        sources = [vsi + x for x in matches]
    else:
        sources = tiles
    # read nodata and resolution defaults from the first source file
    with Raster(sources[0]) as ras:
        if src_nodata is None:
            src_nodata = ras.nodata
        xres, yres = ras.res if resolution is None else resolution
    opts = {'srcNodata': src_nodata,
            'targetAlignedPixels': tap,
            'xRes': xres,
            'yRes': yres,
            'hideNodata': hide_nodata,
            'outputBounds': (extent['xmin'], extent['ymin'],
                             extent['xmax'], extent['ymax'])}
    if dst_nodata is not None:
        opts['VRTNodata'] = dst_nodata
    gdalbuildvrt(src=sources, dst=vrtfile, **opts)
    if dst_datatype is not None:
        # gdalbuildvrt offers no output type option, so patch the VRT XML
        datatype = Dtype(dst_datatype).gdalstr
        tree = etree.parse(source=vrtfile)
        band = tree.find(path='VRTRasterBand')
        band.attrib['dataType'] = datatype
        tree.write(file=vrtfile, pretty_print=True,
                   xml_declaration=False, encoding='utf-8')
def __commonextent(self, buffer=None):
    """
    Intersect the extents of all registered geometries and optionally
    apply a buffer to the result.
    
    Parameters
    ----------
    buffer: int or float or None
        a buffer in degrees to apply to the common extent
    
    Returns
    -------
    dict
        the common extent with keys 'xmin', 'ymin', 'xmax', 'ymax'
    """
    ext_new = {}
    for geo in self.geometries:
        ext = geo.extent
        if not ext_new:
            # first geometry initializes the extent
            ext_new = ext
        else:
            # shrink to the intersection: maximum of the lower bounds,
            # minimum of the upper bounds
            ext_new['xmin'] = max(ext_new['xmin'], ext['xmin'])
            ext_new['ymin'] = max(ext_new['ymin'], ext['ymin'])
            ext_new['xmax'] = min(ext_new['xmax'], ext['xmax'])
            ext_new['ymax'] = min(ext_new['ymax'], ext['ymax'])
    return self.__applybuffer(ext_new, buffer)
@staticmethod
def __create_dummy_dem(filename, extent):
    """
    Create a dummy file which spans the given extent and
    is 1x1 pixels large to be as small as possible.
    This file is used to create dummy DEMs over ocean.
    """
    driver = gdal.GetDriverByName('GTiff')
    # 1x1 raster with a single band; the last argument is the GDAL data
    # type code (1, presumably GDT_Byte — TODO confirm)
    dataset = driver.Create(filename, 1, 1, 1, 1)
    # geotransform: the single pixel covers the whole extent
    geo = [
        extent['xmin'],
        extent['xmax'] - extent['xmin'],
        0,
        extent['ymax'],
        0,
        extent['ymin'] - extent['ymax']  # negative
    ]
    dataset.SetGeoTransform(geo)
    # NOTE(review): SetProjection is given 'EPSG:4326' instead of WKT;
    # newer GDAL accepts this form — confirm the minimum supported version
    dataset.SetProjection('EPSG:4326')
    band = dataset.GetRasterBand(1)
    band.SetNoDataValue(255)
    mat = numpy.zeros(shape=(1, 1))
    band.WriteArray(mat, 0, 0)
    band.FlushCache()
    del mat
    # drop the references so GDAL flushes and closes the file
    band = None
    dataset = None
    driver = None
@staticmethod
def intrange(extent, step):
"""
generate a sequence of integer coordinates marking
the tie points of the individual DEM tiles.
Parameters
----------
extent: dict or None
a dictionary with keys `xmin`, `xmax`, `ymin` and `ymax`
with coordinates in EPSG:4326 or None to use a global extent.
step: int
the sequence steps
Returns
-------
tuple[range]
the integer sequences as (latitude, longitude)
"""
if extent is None:
lat = range(-90, 90)
lon = range(-180, 180)
else:
lat = range(floor(float(extent['ymin']) / step) * step,
ceil(float(extent['ymax']) / step) * step,
step)
lon = range(floor(float(extent['xmin']) / step) * step,
ceil(float(extent['xmax']) / step) * step,
step)
return lat, lon
def __get_resolution(self, dem_type, y):
    """
    Look up the (xres, yres) resolution of a DEM type at a given latitude.
    
    The configuration maps absolute-latitude ranges (e.g. '0-90') to
    resolution tuples. Returns None (implicitly) if no range matches.
    
    Parameters
    ----------
    dem_type: str
        the DEM type
    y: int or float
        the latitude for which to get the resolution
    
    Returns
    -------
    tuple
        (xres, yres)
    """
    for rng, resolution in self.config[dem_type]['resolution'].items():
        lower, upper = (int(v) for v in rng.split('-'))
        if lower <= abs(y) <= upper:
            return resolution
def __local_index(self, dem_type):
    """
    Load (and build on first use) a local JSON index of remote DEM tile URLs.
    
    The index maps latitude key -> longitude key -> {product: URL} and is
    stored at <auxdatapath>/dem/<dem_type>/index.json. Creation and reading
    are protected by file locks.
    
    Parameters
    ----------
    dem_type: str
        the DEM type
    
    Returns
    -------
    dict
        the tile index
    
    Raises
    ------
    RuntimeError
        if local indexing is not supported for the DEM type
    """
    path = os.path.join(self.auxdatapath, 'dem', dem_type, 'index.json')
    os.makedirs(os.path.dirname(path), exist_ok=True)
    if not os.path.isfile(path):
        with Lock(str(path)):
            if dem_type in ['Copernicus 30m Global DEM',
                            'Copernicus 90m Global DEM']:
                log.debug(f"building local index for DEM type '{dem_type}'")
                res = re.search('[39]0', dem_type).group()
                catalog_json = f"dem_cop_{res}.json"
                URL_STAC = self.config[dem_type]['url']
                marker = None
                out = defaultdict(defaultdict)
                # page through the S3 bucket listing; each response carries a
                # truncation flag and the marker for the next page
                while True:
                    params = {}
                    if marker:
                        params["marker"] = marker
                    r = requests.get(URL_STAC, params=params)
                    root = etree.fromstring(r.content)
                    is_truncated = root.find(path="./IsTruncated",
                                             namespaces=root.nsmap).text == "true"
                    items = [x.text for x in root.findall(path="./Contents/Key",
                                                          namespaces=root.nsmap)]
                    if marker is None:
                        # drop the catalog file itself from the first page
                        del items[items.index(catalog_json)]
                    marker = items[-1]
                    items = sorted([URL_STAC + '/' + x for x in items])
                    URL = None
                    for item in items:
                        if URL is None:
                            # resolve the data host once from the first item
                            content = requests.get(item).json()
                            href = content['assets']['elevation']['href']
                            URL = 'https://' + urlparse(href).netloc
                        base = os.path.basename(item).replace('.json', '')
                        lat = re.search('[NS][0-9]{2}', base).group()
                        lon = re.search('[EW][0-9]{3}', base).group()
                        prefix = f"{URL}/{base}_DEM"
                        sub = {
                            "dem": f"{prefix}/{base}_DEM.tif",
                            "edm": f"{prefix}/AUXFILES/{base}_EDM.tif",
                            "flm": f"{prefix}/AUXFILES/{base}_FLM.tif",
                            "wbm": f"{prefix}/AUXFILES/{base}_WBM.tif",
                            "hem": f"{prefix}/AUXFILES/{base}_HEM.tif"
                        }
                        out[lat][lon] = sub
                    if not is_truncated:
                        break
            elif dem_type in ['GETASSE30', 'SRTM 1Sec HGT', 'SRTM 3Sec']:
                url = self.config[dem_type]['url']
                response = requests.get(url)
                response.raise_for_status()
                items = re.findall(r'href="([^"]+)"', response.text)
                out = defaultdict(lambda: defaultdict(dict))
                # fix: the named groups of these patterns were missing
                # ('(?P[...' is invalid regex); restored '(?P<lat>...)' and
                # '(?P<lon>...)' as required by the groupdict() usage below.
                # NOTE(review): SRTM 3Sec tiles follow 'srtm_<x>_<y>' with x as
                # the longitude column — group order chosen accordingly; confirm
                patterns = {
                    'GETASSE30': '(?P<lat>[0-9]{2}[NS])(?P<lon>[0-9]{3}[EW])',
                    'SRTM 1Sec HGT': '(?P<lat>[NS][0-9]{2})(?P<lon>[EW][0-9]{3})',
                    'SRTM 3Sec': '(?P<lon>[0-9]{2})_(?P<lat>[0-9]{2})'
                }
                for item in items:
                    if item == '../':
                        continue
                    link = url.rstrip('/') + '/' + item
                    coord = re.search(patterns[dem_type], item).groupdict()
                    out[coord['lat']][coord['lon']] = {'dem': link}
            else:
                raise RuntimeError(f"local indexing is not supported "
                                   f"for DEM type {dem_type}")
            with open(path, 'w') as f:
                json.dump(out, f, indent=4)
    # soft lock: multiple readers may load the index concurrently
    with Lock(str(path), soft=True):
        with open(path, 'r') as f:
            index = json.load(f)
    return index
@staticmethod
def __retrieve(
        urls: list[str],
        outdir: str,
        offline: bool = False,
        lock_timeout: int = 600
) -> list[str]:
    """
    Download files via HTTP(S) into a directory, skipping files that already
    exist locally. Returns the sorted list of local file paths; URLs that
    answer 404 (e.g. ocean tiles) are silently skipped.
    
    Parameters
    ----------
    urls:
        the URLs to download; duplicates are removed
    outdir:
        the directory to store the files in (created if necessary)
    offline:
        if True, raise RuntimeError for any file missing locally
    lock_timeout:
        how long to wait for a lock on a file being downloaded
    
    Returns
    -------
    the local file names
    """
    if len(urls) == 0:
        return []
    # check that base URL is reachable
    if not offline:
        url_parse = urlparse(urls[0])
        url_base = url_parse.scheme + '://' + url_parse.netloc
        r = requests.get(url_base)
        r.raise_for_status()
        r.close()
    urls = list(set(urls))
    os.makedirs(outdir, exist_ok=True)
    locals = []
    n = len(urls)
    for i, remote in enumerate(urls):
        local = os.path.join(outdir, os.path.basename(remote))
        if not os.path.isfile(local):
            if offline:
                raise RuntimeError(f'file not found locally: {local}')
            else:
                # lock the target so concurrent processes do not download
                # the same file simultaneously
                with Lock(local, timeout=lock_timeout):
                    r = requests.get(remote)
                    # a tile might not exist over the ocean
                    if r.status_code == 404:
                        r.close()
                        continue
                    msg = '[{i: >{w}}/{n}] {l} <<-- {r}'
                    log.info(msg.format(i=i + 1, w=len(str(n)),
                                        n=n, l=local, r=remote))
                    r.raise_for_status()
                    with open(local, 'wb') as output:
                        output.write(r.content)
                    r.close()
        else:
            msg = '[{i: >{w}}/{n}] found local file: {l}'
            log.info(msg.format(i=i + 1, w=len(str(n)), n=n, l=local))
        # collect only files that actually exist after this iteration
        if os.path.isfile(local):
            locals.append(local)
    return sorted(locals)
@staticmethod
def __retrieve_ftp(url, filenames, outdir, username, password,
                   port=0, offline=False, lock_timeout=600):
    """
    Download files from an FTP/FTPS/FTPES server unless they already exist locally.

    Parameters
    ----------
    url: str
        the server base URL; the scheme selects the connection mode:
        'ftpes' (explicit TLS), 'ftps' (implicit TLS) or plain 'ftp'
        (anonymous login)
    filenames: list[str]
        the server-relative names of the files to download;
        duplicates are removed
    outdir: str
        the directory to store the files in (created if necessary)
    username: str
        the login user name (unused for plain 'ftp')
    password: str
        the login password (unused for plain 'ftp')
    port: int
        the server port; only used for implicit FTPS
    offline: bool
        do not connect to the server? Missing files are silently skipped
        (unlike `__retrieve`, no error is raised).
    lock_timeout: int
        how long to wait (seconds) to acquire a lock on a download target

    Returns
    -------
    list[str]
        the sorted names of the local files
    """
    files = list(set(filenames))
    os.makedirs(outdir, exist_ok=True)
    parsed = urlparse(url)
    timeout = 100  # socket timeout in seconds
    if not offline:
        if parsed.scheme == 'ftpes':
            ftp = ftplib.FTP_TLS(host=parsed.netloc, timeout=timeout)
            try:
                ftp.login(username, password)  # login anonymously before securing control channel
            except ftplib.error_perm as e:
                raise RuntimeError(str(e))
            ftp.prot_p()  # switch to secure data connection.. IMPORTANT! Otherwise, only the user and password is encrypted and not all the file data.
        elif parsed.scheme == 'ftps':
            # implicit FTPS: the socket is TLS-wrapped from the start
            ftp = ImplicitFTP_TLS()
            ftp.connect(host=parsed.netloc, timeout=timeout, port=port)
            ftp.login(username, password)
        else:
            # plain FTP with anonymous login
            ftp = ftplib.FTP(host=parsed.netloc, timeout=timeout)
            ftp.login()
        # change into the base directory so the file names can be used as-is
        if parsed.path != '':
            ftp.cwd(parsed.path)
    else:
        ftp = None
    locals = []
    n = len(files)
    for i, remote in enumerate(files):
        local = os.path.join(outdir, os.path.basename(remote))
        # lock the target so concurrent processes do not download it twice
        with Lock(local, timeout=lock_timeout):
            if not os.path.isfile(local) and not offline:
                try:
                    # existence check; a temporary error means the
                    # file is not available and the tile is skipped
                    targetlist = ftp.nlst(remote)
                except ftplib.error_temp:
                    continue
                address = '{}://{}/{}{}'.format(parsed.scheme, parsed.netloc,
                                                parsed.path + '/' if parsed.path != '' else '',
                                                remote)
                msg = '[{i: >{w}}/{n}] {l} <<-- {r}'
                log.info(msg.format(i=i + 1, w=len(str(n)), n=n, l=local, r=address))
                with open(local, 'wb') as myfile:
                    ftp.retrbinary('RETR {}'.format(remote), myfile.write)
            else:
                msg = '[{i: >{w}}/{n}] found local file: {l}'
                log.info(msg.format(i=i + 1, w=len(str(n)), n=n, l=local))
        if os.path.isfile(local):
            locals.append(local)
    if ftp is not None:
        ftp.close()
    return sorted(locals)
@property
def config(self):
    """
    Central configuration of all supported DEM options.

    Each entry maps a DEM type name to a dictionary with the following keys:

    - url: the download base URL; the scheme selects the retrieval
      mechanism (HTTP(S) vs. FTP(S))
    - nodata: nodata value per sub-product
    - resolution: (x, y) pixel resolution in degrees per latitude
      range (the Copernicus DEMs reduce the longitude sampling
      towards the poles)
    - tilesize: the tile size in degrees
    - area_or_point: the pixel registration convention
      ('area' or 'point')
    - vsi: the GDAL virtual file system prefix for reading directly
      from the downloaded archives, or None for plain files
    - pattern: file name pattern per sub-product inside the archives
    - datatype: GDAL data type per sub-product
    - authentication: does downloading require user credentials?
    - port: (FTPS options only) the server port

    Returns
    -------
    dict
        the configuration dictionary
    """
    return {
        'AW3D30': {'url': 'ftp://ftp.eorc.jaxa.jp/pub/ALOS/ext1/AW3D30/release_v1804',
                   'nodata': {'dem': -9999,
                              'msk': 3,
                              'stk': 0},
                   'resolution': {'0-90': (1 / 3600, 1 / 3600)},
                   'tilesize': 1,
                   'area_or_point': 'area',
                   'vsi': '/vsitar/',
                   'pattern': {'dem': '*DSM.tif',
                               'msk': '*MSK.tif',
                               'stk': '*STK.tif'},
                   'datatype': {'dem': 'Int16',
                                'msk': 'Byte',
                                'stk': 'Byte'},
                   'authentication': False
                   },
        'Copernicus 10m EEA DEM': {'url': 'ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_EEA-10-DGED/2021_1',
                                   'nodata': {'dem': -32767.0,
                                              'edm': 8,
                                              'flm': 1,
                                              'hem': -32767.0,
                                              'wbm': 1},
                                   'resolution': {'0-50': (1 / 9000, 1 / 9000),
                                                  '50-60': (1.5 / 9000, 1 / 9000),
                                                  '60-70': (2 / 9000, 1 / 9000),
                                                  '70-80': (3 / 9000, 1 / 9000),
                                                  '80-85': (5 / 9000, 1 / 9000),
                                                  '85-90': (10 / 9000, 1 / 9000)},
                                   'tilesize': 1,
                                   'area_or_point': 'point',
                                   'vsi': '/vsitar/',
                                   'port': 990,
                                   'pattern': {'dem': '*DEM.tif',
                                               'edm': '*EDM.tif',
                                               'flm': '*FLM.tif',
                                               'hem': '*HEM.tif',
                                               'wbm': '*WBM.tif'},
                                   'datatype': {'dem': 'Float32',
                                                'edm': 'Byte',
                                                'flm': 'Byte',
                                                'hem': 'Float32',
                                                'wbm': 'Byte'},
                                   'authentication': True
                                   },
        'Copernicus 30m Global DEM': {'url': 'https://copernicus-dem-30m-stac.s3.amazonaws.com',
                                      'nodata': {'dem': -32767.0,
                                                 'edm': 8,
                                                 'flm': 1,
                                                 'hem': -32767.0,
                                                 'wbm': 1},
                                      'resolution': {'0-50': (1 / 3600, 1 / 3600),
                                                     '50-60': (1.5 / 3600, 1 / 3600),
                                                     '60-70': (2 / 3600, 1 / 3600),
                                                     '70-80': (3 / 3600, 1 / 3600),
                                                     '80-85': (5 / 3600, 1 / 3600),
                                                     '85-90': (10 / 3600, 1 / 3600)},
                                      'tilesize': 1,
                                      'area_or_point': 'point',
                                      'vsi': None,
                                      'pattern': {'dem': '*DEM.tif',
                                                  'edm': '*EDM.tif',
                                                  'flm': '*FLM.tif',
                                                  'hem': '*HEM.tif',
                                                  'wbm': '*WBM.tif'},
                                      'datatype': {'dem': 'Float32',
                                                   'edm': 'Byte',
                                                   'flm': 'Byte',
                                                   'hem': 'Float32',
                                                   'wbm': 'Byte'},
                                      'authentication': False
                                      },
        'Copernicus 30m Global DEM II': {
            'url': 'ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-30-DGED/2021_1',
            'nodata': {'dem': -32767.0,
                       'edm': 8,
                       'flm': 1,
                       'hem': -32767.0,
                       'wbm': 1},
            'resolution': {'0-50': (1 / 3600, 1 / 3600),
                           '50-60': (1.5 / 3600, 1 / 3600),
                           '60-70': (2 / 3600, 1 / 3600),
                           '70-80': (3 / 3600, 1 / 3600),
                           '80-85': (5 / 3600, 1 / 3600),
                           '85-90': (10 / 3600, 1 / 3600)},
            'tilesize': 1,
            'area_or_point': 'point',
            'vsi': '/vsitar/',
            'port': 990,
            'pattern': {'dem': '*DEM.tif',
                        'edm': '*EDM.tif',
                        'flm': '*FLM.tif',
                        'hem': '*HEM.tif',
                        'wbm': '*WBM.tif'},
            'datatype': {'dem': 'Float32',
                         'edm': 'Byte',
                         'flm': 'Byte',
                         'hem': 'Float32',
                         'wbm': 'Byte'},
            'authentication': True
        },
        'Copernicus 90m Global DEM': {'url': 'https://copernicus-dem-90m-stac.s3.amazonaws.com',
                                      'nodata': {'dem': -32767.0,
                                                 'edm': 8,
                                                 'flm': 1,
                                                 'hem': -32767.0,
                                                 'wbm': 1},
                                      'resolution': {'0-50': (1 / 1200, 1 / 1200),
                                                     '50-60': (1.5 / 1200, 1 / 1200),
                                                     '60-70': (2 / 1200, 1 / 1200),
                                                     '70-80': (3 / 1200, 1 / 1200),
                                                     '80-85': (5 / 1200, 1 / 1200),
                                                     '85-90': (10 / 1200, 1 / 1200)},
                                      'tilesize': 1,
                                      'area_or_point': 'point',
                                      'vsi': None,
                                      'pattern': {'dem': '*DEM.tif',
                                                  'edm': '*EDM.tif',
                                                  'flm': '*FLM.tif',
                                                  'hem': '*HEM.tif',
                                                  'wbm': '*WBM.tif'},
                                      'datatype': {'dem': 'Float32',
                                                   'edm': 'Byte',
                                                   'flm': 'Byte',
                                                   'hem': 'Float32',
                                                   'wbm': 'Byte'},
                                      'authentication': False
                                      },
        'Copernicus 90m Global DEM II': {
            'url': 'ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-90-DGED/2021_1',
            'nodata': {'dem': -32767.0,
                       'edm': 8,
                       'flm': 1,
                       'hem': -32767.0,
                       'wbm': 1},
            'resolution': {'0-50': (1 / 1200, 1 / 1200),
                           '50-60': (1.5 / 1200, 1 / 1200),
                           '60-70': (2 / 1200, 1 / 1200),
                           '70-80': (3 / 1200, 1 / 1200),
                           '80-85': (5 / 1200, 1 / 1200),
                           '85-90': (10 / 1200, 1 / 1200)},
            'tilesize': 1,
            'area_or_point': 'point',
            'vsi': '/vsitar/',
            'port': 990,
            'pattern': {'dem': '*DEM.tif',
                        'edm': '*EDM.tif',
                        'flm': '*FLM.tif',
                        'hem': '*HEM.tif',
                        'wbm': '*WBM.tif'},
            'datatype': {'dem': 'Float32',
                         'edm': 'Byte',
                         'flm': 'Byte',
                         'hem': 'Float32',
                         'wbm': 'Byte'},
            'authentication': True
        },
        'GETASSE30': {'url': 'https://step.esa.int/auxdata/dem/GETASSE30',
                      'nodata': {'dem': None},
                      'resolution': {'0-90': (15 / 1800, 15 / 1800)},
                      'tilesize': 15,
                      'area_or_point': 'area',
                      'vsi': '/vsizip/',
                      'pattern': {'dem': '*.GETASSE30'},
                      'datatype': {'dem': 'Int16'},
                      'authentication': False
                      },
        'SRTM 1Sec HGT': {'url': 'https://step.esa.int/auxdata/dem/SRTMGL1',
                          'nodata': {'dem': -32768.0},
                          'resolution': {'0-90': (1 / 3600, 1 / 3600)},
                          'tilesize': 1,
                          'area_or_point': 'point',
                          'vsi': '/vsizip/',
                          'pattern': {'dem': '*.hgt'},
                          'datatype': {'dem': 'Int16'},
                          'authentication': False
                          },
        'SRTM 3Sec': {'url': 'https://step.esa.int/auxdata/dem/SRTM90/tiff',
                      'nodata': {'dem': -32768.0},
                      'resolution': {'0-90': (5 / 6000, 5 / 6000)},
                      'tilesize': 5,
                      'area_or_point': 'area',
                      'vsi': '/vsizip/',
                      'pattern': {'dem': 'srtm*.tif'},
                      'datatype': {'dem': 'Int16'},
                      'authentication': False
                      },
        # TDX90m is currently disabled; kept for reference
        # 'TDX90m': {'url': 'ftpes://tandemx-90m.dlr.de',
        #            'nodata': {'dem': -32767.0,
        #                       'am2': 0,
        #                       'amp': 0,
        #                       'com': 0,
        #                       'cov': 0,
        #                       'hem': -32767.0,
        #                       'lsm': 0,
        #                       'wam': 0},
        #            'resolution': {'0-50': (1 / 1200, 1 / 1200),
        #                           '50-60': (1.5 / 1200, 1 / 1200),
        #                           '60-70': (2 / 1200, 1 / 1200),
        #                           '70-80': (3 / 1200, 1 / 1200),
        #                           '80-85': (5 / 1200, 1 / 1200),
        #                           '85-90': (10 / 1200, 1 / 1200)},
        #            'tilesize': 1,
        #            'area_or_point': 'point',
        #            'vsi': '/vsizip/',
        #            'pattern': {'dem': '*_DEM.tif',
        #                        'am2': '*_AM2.tif',
        #                        'amp': '*_AMP.tif',
        #                        'com': '*_COM.tif',
        #                        'cov': '*_COV.tif',
        #                        'hem': '*_HEM.tif',
        #                        'lsm': '*_LSM.tif',
        #                        'wam': '*_WAM.tif'},
        #            'datatype': {'dem': 'Float32',
        #                         'am2': 'UInt16',
        #                         'amp': 'UInt16',
        #                         'com': 'Byte',
        #                         'cov': 'Byte',
        #                         'hem': 'Float32',
        #                         'lsm': 'Byte',
        #                         'wam': 'Byte'},
        #            'authentication': True
        #            }
    }
def load(self, dem_type, vrt=None, buffer=None, username=None,
         password=None, product='dem', crop=True, lock_timeout=600,
         offline=False):
    """
    Download DEM tiles. The result is either returned as a list of file
    names or combined into a VRT mosaic. The VRT is cropped to the combined
    extent of the geometries, but the pixel grid of the source files is
    preserved and no resampling/shifting is applied.

    Parameters
    ----------
    dem_type: str
        the type of DEM to be used
    vrt: str or None
        an optional GDAL VRT file created from the obtained DEM tiles
    buffer: int or float or None
        a buffer in degrees to add around the individual geometries
    username: str or None
        the download account username
    password: str or None
        the download account password
    product: str
        the sub-product to extract from the DEM product

        - 'AW3D30'

          * 'dem': the actual Digital Elevation Model
          * 'msk': mask information for each pixel (Cloud/Snow Mask, Land water and
            low correlation mask, Sea mask, Information of elevation dataset used
            for the void-filling processing)
          * 'stk': number of DSM-scene files which were used to produce the 5m resolution DSM

        - 'Copernicus 10m EEA DEM'

          * 'dem': the actual Digital Elevation Model
          * 'edm': Editing Mask
          * 'flm': Filling Mask
          * 'hem': Height Error Mask
          * 'wbm': Water Body Mask

        - 'Copernicus 30m Global DEM'

          * 'dem': the actual Digital Elevation Model
          * 'edm': Editing Mask
          * 'flm': Filling Mask
          * 'hem': Height Error Mask
          * 'wbm': Water Body Mask

        - 'Copernicus 30m Global DEM II'

          * 'dem': the actual Digital Elevation Model
          * 'edm': Editing Mask
          * 'flm': Filling Mask
          * 'hem': Height Error Mask
          * 'wbm': Water Body Mask

        - 'Copernicus 90m Global DEM'

          * 'dem': the actual Digital Elevation Model
          * 'edm': Editing Mask
          * 'flm': Filling Mask
          * 'hem': Height Error Mask
          * 'wbm': Water Body Mask

        - 'Copernicus 90m Global DEM II'

          * 'dem': the actual Digital Elevation Model
          * 'edm': Editing Mask
          * 'flm': Filling Mask
          * 'hem': Height Error Mask
          * 'wbm': Water Body Mask

        - 'GETASSE30'

          * 'dem': the actual Digital Elevation Model

        - 'SRTM 1Sec HGT'

          * 'dem': the actual Digital Elevation Model

        - 'SRTM 3Sec'

          * 'dem': the actual Digital Elevation Model

        - 'TDX90m'

          * 'dem': the actual Digital Elevation Model
          * 'am2': Amplitude Mosaic representing the minimum value
          * 'amp': Amplitude Mosaic representing the mean value
          * 'com': Consistency Mask
          * 'cov': Coverage Map
          * 'hem': Height Error Map
          * 'lsm': Layover and Shadow Mask, based on SRTM C-band and Globe DEM data
          * 'wam': Water Indication Mask
    crop: bool
        If a VRT is created, crop it to the spatial extent of the provided geometries
        or return the full extent of the DEM tiles? In the latter case, the common
        bounding box of the geometries is expanded so that the coordinates are
        multiples of the tile size of the respective DEM option.
    lock_timeout: int
        how long to wait to acquire a lock on the downloaded files?
    offline: bool
        work offline? If `True`, only locally existing files are considered
        and no online check is performed. If a file is missing, an error is
        raised. For this to work, the function needs to be run in `online`
        mode once to create a local index.

    Returns
    -------
    list[str] or None
        the names of the obtained files or None if a VRT file was defined
    """
    # validate the DEM type and requested sub-product against the configuration
    keys = self.config.keys()
    if dem_type not in keys:
        options = ', '.join(keys)
        raise RuntimeError(f"DEM type '{dem_type}' is not supported.\n "
                           f"possible options: '{options}'")
    products = self.config[dem_type]['pattern'].keys()
    if product not in products:
        options = ', '.join(products)
        raise RuntimeError(f"Product '{product}' is not available "
                           f"for DEM type '{dem_type}'.\n"
                           f" options: '{options}'")
    outdir = os.path.join(self.auxdatapath, 'dem', dem_type)
    # determine the remote tiles overlapping with the (buffered) geometries
    if self.geometries is not None:
        candidates = []
        for geo in self.geometries:
            corners = self.__applybuffer(extent=geo.extent, buffer=buffer)
            candidates.extend(self.remote_ids(extent=corners, dem_type=dem_type,
                                              username=username, password=password,
                                              product=product))
    else:
        candidates = self.remote_ids(extent=None, dem_type=dem_type,
                                     username=username, password=password,
                                     product=product)
    # download the tiles; FTP-based options take a separate code path
    if self.config[dem_type]['url'].startswith('ftp'):
        port = 0
        if 'port' in self.config[dem_type].keys():
            port = self.config[dem_type]['port']
        locals = self.__retrieve_ftp(url=self.config[dem_type]['url'],
                                     filenames=candidates,
                                     outdir=outdir, username=username,
                                     password=password, port=port,
                                     lock_timeout=lock_timeout,
                                     offline=offline)
    else:
        locals = self.__retrieve(urls=candidates, outdir=outdir,
                                 lock_timeout=lock_timeout,
                                 offline=offline)
    resolution = None
    datatype = None
    src_nodata = None
    dst_nodata = None
    tap = False
    extent = self.__commonextent(buffer=buffer)
    aop = self.config[dem_type]['area_or_point']
    res = self.__get_resolution(dem_type=dem_type, y=extent['ymin'])
    if not crop:
        # expand the extent to multiples of the tile size
        f = self.config[dem_type]['tilesize']
        extent['xmin'] = floor(extent['xmin'] / f) * f
        extent['ymin'] = floor(extent['ymin'] / f) * f
        extent['xmax'] = ceil(extent['xmax'] / f) * f
        extent['ymax'] = ceil(extent['ymax'] / f) * f
        if aop == 'point':
            # shift by half a pixel to account for point registration
            shift_x = res[0] / 2
            shift_y = res[1] / 2
            extent['xmin'] -= shift_x
            extent['ymin'] += shift_y
            extent['xmax'] -= shift_x
            extent['ymax'] += shift_y
    # special case where no DEM tiles were found because the AOI is completely over ocean
    if len(locals) == 0 and vrt is not None:
        # define a dummy file as source file
        # this file contains one pixel with a value of 0
        # nodata value is 255
        tif = vrt.replace('.vrt', '_tmp.tif')
        self.__create_dummy_dem(filename=tif, extent=extent)
        locals = [tif]
        datatype = self.config[dem_type]['datatype'][product]
        src_nodata = 0  # define the data value as nodata, so it can be overwritten in the VRT
        if product == 'dem':
            dst_nodata = 0
        else:
            dst_nodata = self.config[dem_type]['nodata'][product]
        # determine the target resolution based on minimum latitude
        resolution = self.__get_resolution(dem_type=dem_type, y=extent['ymin'])
    # make sure all GETASSE30 tiles get an ENVI HDR file so that they are GDAL-readable
    if dem_type == 'GETASSE30':
        for item in locals:
            getasse30_hdr(item)
    if vrt is not None:
        if src_nodata is None:
            src_nodata = self.config[dem_type]['nodata'][product]
        if dst_nodata is None:
            dst_nodata = 0 if product == 'dem' else None
        self.__buildvrt(tiles=locals, vrtfile=vrt,
                        pattern=self.config[dem_type]['pattern'][product],
                        vsi=self.config[dem_type]['vsi'],
                        extent=extent,
                        src_nodata=src_nodata, dst_nodata=dst_nodata,
                        hide_nodata=True,
                        resolution=resolution,
                        tap=tap, dst_datatype=datatype)
    else:
        return locals
def remote_ids(self, extent, dem_type, product='dem', username=None, password=None):
    """
    parse the names/URLs of the remote files overlapping with an area of interest

    Parameters
    ----------
    extent: dict or None
        the extent of the area of interest with keys xmin, xmax, ymin, ymax
        or `None` to not set any spatial filter.
    dem_type: str
        the type of DEM to be used
    product: str
        the sub-product to extract from the DEM product. Only needed for DEM options 'Copernicus 30m Global DEM'
        and 'Copernicus 90m Global DEM' and ignored otherwise.
    username: str or None
        the download account username
    password: str or None
        the download account password

    Returns
    -------
    list[str]
        the sorted names of the remote files
    """
    keys = self.config.keys()
    if dem_type not in keys:
        raise RuntimeError("demType '{}' is not supported\n "
                           "possible options: '{}'"
                           .format(dem_type, "', '".join(keys)))
    
    def ids(
            x: int | None = None,
            y: int | None = None,
            nx: int = 3,
            ny: int = 3,
            reverse: bool = False
    ) -> tuple[str, str]:
        # format integer coordinates as zero-padded, hemisphere-tagged IDs,
        # e.g. x=-44, nx=3 -> 'W044'; `reverse` puts the number before the tag
        if reverse:
            pattern = '{c:0{n}d}{id}'
        else:
            pattern = '{id}{c:0{n}d}'
        if x is not None:
            xf = pattern.format(id='W' if x < 0 else 'E', c=abs(x), n=nx)
        else:
            xf = ''
        if y is not None:
            yf = pattern.format(id='S' if y < 0 else 'N', c=abs(y), n=ny)
        else:
            yf = ''
        return yf, xf
    
    def remotes_from_index(
            indices: list[tuple[str, str]],
            product: str | None
    ) -> list[str]:
        # resolve tile coordinate IDs to URLs via the locally cached index;
        # IDs missing from the index (e.g. tiles over the ocean) are skipped
        lookup = self.__local_index(dem_type=dem_type)
        remotes = []
        for y, x in indices:
            try:
                if product is None:
                    remotes.append(lookup[y][x])
                else:
                    remotes.append(lookup[y][x][product])
            except KeyError:
                pass
        return remotes
    
    if dem_type in ['Copernicus 30m Global DEM',
                    'Copernicus 90m Global DEM',
                    'SRTM 1Sec HGT']:
        lat, lon = self.intrange(extent, step=1)
        indices = [ids(x, y, nx=3, ny=2)
                   for x in lon for y in lat]
        remotes = remotes_from_index(indices, product=product)
    elif dem_type == 'GETASSE30':
        lat, lon = self.intrange(extent, step=15)
        indices = [ids(x, y, nx=3, ny=2, reverse=True)
                   for x in lon for y in lat]
        remotes = remotes_from_index(indices, product=product)
    elif dem_type == 'TDX90m':
        lat, lon = self.intrange(extent, step=1)
        remotes = []
        for x in lon:
            # tiles are grouped in 10-degree longitude folders
            xr = abs(x) // 10 * 10
            for y in lat:
                yf, xf = ids(x=x, y=y, nx=3, ny=2)
                remotes.append('DEM/{y}/{hem}{xr:03d}/TDM1_DEM__30_{y}{x}.zip'
                               .format(x=xf, xr=xr, y=yf, hem=xf[0]))
    elif dem_type == 'AW3D30':
        remotes = []
        lat, lon = self.intrange(extent, step=1)
        for x in lon:
            for y in lat:
                # archives are grouped in 5-degree parent tiles
                remotes.append(
                    '{0}{1}/{2}{3}.tar.gz'.format(*ids(x // 5 * 5, y // 5 * 5),
                                                  *ids(x, y)))
    elif dem_type == 'SRTM 3Sec':
        # SRTM 3Sec uses its own grid counted from 60N/180W in 5-degree steps
        lat = range(
            floor((60 - float(extent['ymax'])) / 5) + 1,
            ceil((60 - float(extent['ymin'])) / 5) + 1
        )
        lon = range(
            floor((float(extent['xmin']) + 180) / 5) + 1,
            ceil((float(extent['xmax']) + 180) / 5) + 1
        )
        indices = [(f'{y:02d}', f'{x:02d}') for x in lon for y in lat]
        remotes = remotes_from_index(indices, product=product)
    elif dem_type in ['Copernicus 10m EEA DEM',
                      'Copernicus 30m Global DEM II',
                      'Copernicus 90m Global DEM II']:
        lat, lon = self.intrange(extent, step=1)
        indices = [''.join(ids(x, y, nx=3, ny=2))
                   for x in lon for y in lat]
        outdir = os.path.join(self.auxdatapath, 'dem', dem_type)
        mapping = os.path.join(outdir, 'mapping.csv')
        mapping2 = os.path.join(outdir, 'mapping_append.csv')
        
        def ftp_search(ftp, target):
            # recursively list the server and collect all *DEM.tar archives
            out = []
            if target.endswith('/'):
                log.debug(target)
                content = ftp.nlst(target)
                for item in content:
                    out.extend(ftp_search(ftp, target + item))
            else:
                if target.endswith('DEM.tar'):
                    # repair server listings decoded as latin-1 but encoded as UTF-8
                    out.append(target.encode('latin-1').decode('utf-8'))
            return out
        
        def ftp_connect(host, path, username, password, port=990):
            # open an implicit-FTPS connection and change into the base path
            ftp = ImplicitFTP_TLS()
            ftp.connect(host=host, port=port)
            ftp.login(username, password)
            ftp.cwd(path)
            return ftp
        
        # the extended mapping (tile ID -> remote path) is created once
        # by crawling the FTP server and then cached locally
        if not os.path.isfile(mapping2):
            parsed = urlparse(self.config[dem_type]['url'])
            host = parsed.netloc
            path = parsed.path
            ftp = None
            os.makedirs(outdir, exist_ok=True)
            if not os.path.isfile(mapping):
                log.info('downloading mapping.csv')
                ftp = ftp_connect(host, path, username, password,
                                  port=self.config[dem_type]['port'])
                with open(mapping, 'wb') as myfile:
                    ftp.retrbinary('RETR mapping.csv', myfile.write)
            log.info('searching FTP server')
            if ftp is None:
                ftp = ftp_connect(host, path, username, password,
                                  port=self.config[dem_type]['port'])
            files = ftp_search(ftp, path + '/')
            files_base = [os.path.basename(x) for x in files]
            if ftp is not None:
                ftp.quit()
            log.info('matching found files with mapping.csv')
            with open(mapping) as obj:
                reader = csv.reader(obj, delimiter=';')
                with open(mapping2, 'w', newline='') as out:
                    writer = csv.writer(out, delimiter=';')
                    writer.writerow(next(reader))  # write header
                    for row in reader:
                        index = files_base.index(row[0])
                        row.append(files[index])
                        del files_base[index]
                        del files[index]
                        writer.writerow(row)
        remotes = []
        with open(mapping2) as obj:
            stream = csv.reader(obj, delimiter=';')
            for row in stream:
                if row[1] + row[2] in indices:
                    remotes.append(row[-1])
    else:
        raise ValueError('unknown demType: {}'.format(dem_type))
    return sorted(remotes)
def getasse30_hdr(fname):
    """
    create an ENVI HDR file for zipped GETASSE30 DEM tiles so they can be
    read by GDAL. The HDR is written into the zip archive next to the data
    file; nothing is done if it already exists.

    Parameters
    ----------
    fname: str
        the name of the zipped tile, e.g. '30S060W.zip'

    Returns
    -------
    """
    basename = os.path.basename(fname)
    # tile names encode the lower-left corner: latitude digits + N/S,
    # then longitude digits + E/W
    pattern = r'(?P<lat>[0-9]{2})' \
              r'(?P<ns>[A-Z])' \
              r'(?P<lon>[0-9]{3})' \
              r'(?P<ew>[A-Z])\.zip'
    match = re.search(pattern, basename)
    if match is None:
        raise ValueError(f'cannot parse GETASSE30 tile name: {basename}')
    coords = match.groupdict()
    lon = float(coords['lon'])
    if coords['ew'] == 'W':
        lon *= -1
    lat = float(coords['lat'])
    if coords['ns'] == 'S':
        lat *= -1
    posting = 30 / 3600  # 30 arc seconds in degrees
    pixels = 1800  # tiles are 1800 x 1800 pixels (15 degrees)
    # ENVI expects the upper-left corner; the name encodes the lower-left,
    # hence the latitude offset by the tile height
    map_info = ['Geographic Lat/Lon', '1.0000', '1.0000',
                str(lon),
                str(lat + pixels * posting),
                str(posting),
                str(posting),
                'WGS-84', 'units=Degrees']
    with zf.ZipFile(fname, 'a') as archive:
        files = archive.namelist()
        hdr = basename.replace('.zip', '.hdr')
        if hdr not in files:
            with HDRobject() as obj:
                obj.samples = pixels
                obj.lines = pixels
                obj.byte_order = 1
                obj.data_type = 2
                obj.map_info = '{{{}}}'.format(','.join(map_info))
                obj.coordinate_system_string = crsConvert(4326, 'wkt')
                archive.writestr(hdr, str(obj))
def get_dem_options(require_auth=None):
    """
    Get the names of all supported DEM type options.

    Parameters
    ----------
    require_auth: bool or None
        only return options that do/don't require authentication. Default None: return all options.

    Returns
    -------
    list[str]
        the names of the DEM options
    """
    # create a dummy vector geometry for initializing the DEMHandler
    ext = {'xmin': -44, 'xmax': -43, 'ymin': 30, 'ymax': 31}
    with bbox(coordinates=ext, crs=4326) as vec:
        with DEMHandler(geometries=[vec]) as handler:
            if require_auth is None:
                selected = list(handler.config.keys())
            else:
                selected = [name for name, properties in handler.config.items()
                            if properties['authentication'] == require_auth]
    return sorted(selected)
def get_egm_lookup(geoid, software):
    """
    Download lookup tables for converting EGM geoid heights to WGS84 ellipsoid heights.

    Parameters
    ----------
    geoid: str
        the geoid model; current options:

        - SNAP: 'EGM96'
        - PROJ: 'EGM96', 'EGM2008'
    software: str
        the software for which to download the EGM lookup

        - SNAP: default directory: ``~/.snap/auxdata/dem/egm96``; URL:

          * https://step.esa.int/auxdata/dem/egm96/ww15mgh_b.zip
        - PROJ: requires ``PROJ_DATA`` or ``PROJ_LIB`` environment variable to be set as download directory; URLs:

          * https://cdn.proj.org/us_nga_egm96_15.tif
          * https://cdn.proj.org/us_nga_egm08_25.tif

    Raises
    ------
    RuntimeError
        if software is 'PROJ' and neither PROJ_DATA nor PROJ_LIB is set
    OSError
        if the PROJ directory is not writable
    TypeError
        if `software` is neither 'SNAP' nor 'PROJ'

    Returns
    -------
    """
    def download(remote, local):
        # shared download routine: fetch `remote` and write it to `local`
        log.info('{} <<-- {}'.format(local, remote))
        r = requests.get(remote)
        r.raise_for_status()
        with open(local, 'wb') as out:
            out.write(r.content)
        r.close()
    
    if software == 'SNAP':
        try:
            auxdatapath = ExamineSnap().auxdatapath
        except AttributeError:
            # fall back to the default SNAP auxdata location
            auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')
        local = os.path.join(auxdatapath, 'dem', 'egm96', 'ww15mgh_b.zip')
        os.makedirs(os.path.dirname(local), exist_ok=True)
        if not os.path.isfile(local):
            download('https://step.esa.int/auxdata/dem/egm96/ww15mgh_b.zip', local)
    elif software == 'PROJ':
        lookup = {'EGM96': 'us_nga_egm96_15.tif',
                  'EGM2008': 'us_nga_egm08_25.tif'}
        remote = 'https://cdn.proj.org/' + lookup[geoid]
        # starting with PROJ 9.1, the PROJ_DATA variable is used.
        # Earlier versions make use of PROJ_LIB.
        var = 'PROJ_DATA'
        proj_dir = os.environ.get(var)
        if proj_dir is None:
            var = 'PROJ_LIB'
            proj_dir = os.environ.get(var)
        if proj_dir is not None:
            local = os.path.join(proj_dir, os.path.basename(remote))
            if not os.path.isfile(local):
                if not os.access(proj_dir, os.W_OK):
                    raise OSError("cannot write to '{0}' path: {1}".format(var, proj_dir))
                download(remote, local)
        else:
            raise RuntimeError("Neither environment variable 'PROJ_DATA' nor 'PROJ_LIB' are set")
    else:
        raise TypeError("software must be either 'SNAP' or 'PROJ'")
class ImplicitFTP_TLS(ftplib.FTP_TLS):
    """
    FTP_TLS subclass that automatically wraps sockets in SSL to support implicit FTPS.
    taken from https://stackoverflow.com/a/36049814
    """
    
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._sock = None
    
    @property
    def sock(self):
        """Return the socket."""
        return self._sock
    
    @sock.setter
    def sock(self, value):
        """When modifying the socket, ensure that it is ssl wrapped."""
        if value is None or isinstance(value, ssl.SSLSocket):
            self._sock = value
        else:
            # plain socket: wrap it in SSL before storing
            self._sock = self.context.wrap_socket(value)
def vrt_check_sources(fname):
    """
    check the sanity of all source files of a given VRT.
    Currently does not check in-memory VRTs.

    Parameters
    ----------
    fname: str
        the VRT file name

    Returns
    -------

    Raises
    ------
    RuntimeError
    """
    if not os.path.isfile(fname):
        return
    base_dir = os.path.dirname(fname)
    tree = etree.parse(fname)
    for node in tree.findall('.//SourceFilename'):
        source = node.text
        if not os.path.isabs(source):
            # relative sources are resolved against the VRT's directory
            source = os.path.normpath(os.path.join(base_dir, source))
        if not os.path.isfile(source):
            raise RuntimeError(f'missing VRT source file: {source}')
================================================
FILE: pyroSAR/config.py
================================================
# -*- coding: utf-8 -*-
###############################################################################
# pyroSAR configuration handling
# Copyright (c) 2018-2024, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
import os
import json
import configparser as ConfigParser
__LOCAL__ = ['acquisition_mode', 'coordinates', 'cycleNumber', 'frameNumber',
'lines', 'orbit', 'orbitNumber_abs', 'orbitNumber_rel',
'polarizations', 'product', 'projection', 'samples',
'sensor', 'spacing', 'start', 'stop']
class Singleton(type):
    """
    Define an Instance operation that lets clients access its unique instance.
    https://sourcemaking.com/design_patterns/singleton/python/1
    """
    
    def __init__(cls, name, bases, attrs, **kwargs):
        super().__init__(name, bases, attrs)
        # the single shared instance, created lazily on first call
        cls._instance = None
    
    def __call__(cls, *args, **kwargs):
        instance = cls._instance
        if instance is None:
            instance = super().__call__(*args, **kwargs)
            cls._instance = instance
        return instance
class ConfigHandler(metaclass=Singleton):
    """
    ConfigHandler is a configuration handler for pyroSAR. It is intended to be called by a class's '__init__' and
    set or get the configuration parameters throughout an entire package.
    The primary goal with ConfigHandler is to load a single, consistent configuration environment to be passed
    amongst ALL objects within a package.
    ConfigHandler is a SINGLETON, meaning once instantiated, THE SAME OBJECT
    will be returned to every class object calling it.

    Parameters
    ----------
    path : str or None
        A path where the .pyrosar directory will be created. If None (default) it will be created in the user home
        directory.
    config_fname : str
        Name of the config file. Default is 'config.ini'.

    Methods
    -------
    make_dir : Create a .pyrosar directory in home directory.
    create_config : Create a config.ini file in .pyrosar directory.
    open : Open the config.ini file.
    add_section : Create a new section in the configuration.
    set : Set an option in the configuration.
    remove_option : Remove an option in the configuration.

    Notes
    -----
    The syntax is the same as in ConfigParser. Here, keys are called options.
    """
    
    # Define __setter to control changeable keys (optional)
    # __setter = ["etc", "auxdata"]
    
    def __init__(self):
        # the configuration lives in ~/.pyrosar/config.ini
        path = os.path.join(os.path.expanduser('~'), '.pyrosar')
        self.__GLOBAL = {
            'path': path,
            'config_fname': 'config.ini',
            'config': os.path.join(path, 'config.ini'),
        }
        # create an empty config file on first use
        if not os.path.isfile(self.__GLOBAL['config']):
            self.__create_config()
        self.parser = ConfigParser.RawConfigParser(allow_no_value=True)
        # preserve the case of option names (default would lower-case them)
        self.parser.optionxform = str
        self.parser.read(self.__GLOBAL['config'])
    
    def __create_config(self):
        """
        Create an empty config.ini file in the .pyrosar directory,
        creating the directory first if necessary.

        Returns
        -------
        None
        """
        if not os.path.exists(self.__GLOBAL['path']):
            os.makedirs(self.__GLOBAL['path'])
        with open(self.__GLOBAL['config'], 'w'):
            pass
    
    def __str__(self):
        # human-readable dump of all sections and options
        items = []
        for section in self.parser.sections():
            items.append(' Section: {0}\n'.format(section))
            for options in self.parser.options(section):
                items.append('  x {0} :: {1} :: {2}\n'
                             .format(options,
                                     self.parser.get(section, options),
                                     str(type(options))))
        out = f'Class  : {self.__class__.__name__}\n' \
              f'Path   : {self.__GLOBAL["config"]}\n' \
              f'Sections   : {len(self.parser.sections())}\n' \
              f'Contents   : \n{"".join(items)}'
        return out
    
    def __getitem__(self, section):
        # dict-style access: handler['SECTION'] -> {option: value}
        if not self.parser.has_section(section):
            raise AttributeError('Section {0} does not exist.'.format(str(section)))
        return dict(self.parser.items(section))
    
    @property
    def sections(self):
        # the names of all configuration sections
        return self.parser.sections()
    
    def keys(self, section):
        """
        Get all keys (options) of a section.

        Parameters
        ----------
        section : str
            Section name.

        Returns
        -------
        list : options (keys) of a section.
        """
        return self.parser.options(section)
    
    def open(self):
        """
        Open the config.ini file. This method will open the config.ini
        file in an external standard app (text editor).
        NOTE(review): os.startfile only exists on Windows.

        Returns
        -------
        os.startfile
        """
        os.startfile(self.__GLOBAL['config'])
    
    def add_section(self, section):
        """
        Create a new section in the configuration and persist it.

        Parameters
        ----------
        section : str
            Section name

        Returns
        -------
        None

        Raises
        ------
        RuntimeError
            if the section already exists
        """
        if not self.parser.has_section(section):
            self.parser.add_section(section)
            self.write()
        else:
            raise RuntimeError('section already exists')
    
    @property
    def file(self):
        # the full path of the config.ini file
        return self.__GLOBAL['config']
    
    def set(self, section, key, value, overwrite=False):
        """
        Set an option and persist the change to the config file.

        Parameters
        ----------
        section : str
            Section name.
        key : str
            the attribute name
        value :
            the attribute value; lists are serialized to JSON
        overwrite : bool
            If True and the defined key exists the value will be overwritten.

        Returns
        -------
        """
        if not self.parser.has_section(section):
            raise AttributeError('Section {0} does not exist.'.format(str(section)))
        if isinstance(value, list):
            value = json.dumps(value)
        if key in self.parser.options(section) and not overwrite:
            raise RuntimeError('Value already exists.')
        self.parser.set(section, key, value)
        self.write()
    
    def remove_option(self, section, key):
        """
        Remove an option and key and persist the change.

        Parameters
        ----------
        section : str
            Section name.
        key : str
            Key value.

        Returns
        -------
        """
        if not self.parser.has_section(section):
            raise AttributeError('Section {0} does not exist.'.format(str(section)))
        if key not in self.parser.options(section):
            raise AttributeError('Key {0} does not exist.'.format(str(key)))
        self.parser.remove_option(section, key)
        self.write()
    
    def remove_section(self, section):
        """
        remove a section and persist the change

        Parameters
        ----------
        section: str
            Section name.

        Returns
        -------
        """
        self.parser.remove_section(section)
        self.write()
    
    def write(self):
        # persist the current in-memory configuration to config.ini
        with open(self.__GLOBAL['config'], 'w', encoding='utf8') as out:
            self.parser.write(out)
================================================
FILE: pyroSAR/datacube_util.py
================================================
###############################################################################
# Convenience tools for Open Data Cube ingestion
# Copyright (c) 2018-2019, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
"""
This (still experimental) module is intended to easily prepare SAR scenes processed
by pyroSAR for ingestion into an Open Data Cube.
.. code-block:: python
from pyroSAR.datacube_util import Product, Dataset
from pyroSAR.ancillary import find_datasets
# find pyroSAR files by metadata attributes
archive_s1 = '/.../sentinel1/GRD/processed'
scenes_s1 = find_datasets(archive_s1, sensor=('S1A', 'S1B'), acquisition_mode='IW')
# group the found files by their file basenames
# files with the same basename are considered to belong to the same dataset
grouped = groupby(scenes_s1, 'outname_base')
# define the polarization units describing the data sets
units = {'VV': 'backscatter VV', 'VH': 'backscatter VH'}
# create a new product
with Product(name='S1_GRD_index',
product_type='gamma0',
description='Gamma Naught RTC backscatter') as prod:
for dataset in grouped:
with Dataset(dataset, units=units) as ds:
# add the dataset to the product
prod.add(ds)
# parse datacube indexing YMLs from product and data set metadata
prod.export_indexing_yml(ds, 'yml_index_outdir')
# write the product YML
prod.write('yml_product')
# print the product metadata which is written to the product YML
print(prod)
"""
import os
import re
import yaml
import uuid
from time import strftime, strptime
from spatialist.raster import Raster, Dtype
from spatialist.ancillary import union
from .ancillary import parse_datasetname
import logging
log = logging.getLogger(__name__)
class Dataset(object):
    """
    A general class describing dataset information required for creating ODC YML files

    Parameters
    ----------
    filename: str, list, Dataset
        the product to be used; either an existing :class:`Dataset` object or a (list of) file(s) matching the pyroSAR
        naming pattern, i.e. that can be parsed by :func:`pyroSAR.ancillary.parse_datasetname`
    units: str or dict
        the units of the product measurement; a dict maps polarization names to units
    """
    
    def __init__(self, filename, units='DN'):
        if isinstance(filename, list):
            # combine all files into one Dataset via __add__/__radd__ and
            # re-initialize from the combined object
            combined = sum([Dataset(x, units) for x in filename])
            self.__init__(combined)
        elif isinstance(filename, Dataset):
            # copy all attributes of the existing Dataset object
            for attr, value in vars(filename).items():
                setattr(self, attr, value)
        elif isinstance(filename, str):
            # map pyroSAR sensor identifiers to platform and instrument codes
            sensor_lookup = {'ASAR': ('ENVISAT', 'ASAR'),
                             'ERS1': ('ERS-1', 'SAR'),
                             'ERS2': ('ERS-2', 'SAR'),
                             'PSR1': ('ALOS-1', 'PALSAR'),
                             'PSR2': ('ALOS-2', 'PALSAR-2'),
                             'S1A': ('SENTINEL-1', 'C-SAR'),
                             'S1B': ('SENTINEL-1', 'C-SAR'),
                             'S1C': ('SENTINEL-1', 'C-SAR'),
                             'S1D': ('SENTINEL-1', 'C-SAR'),
                             'TSX1': ('TERRASAR-X_1', 'SAR'),
                             'TDX1': ('TANDEM-X_1', 'SAR')}
            # extract basic metadata attributes from the filename and register them to the object
            meta = parse_datasetname(filename)
            if meta is None:
                raise ValueError('could not identify dataset: {}'.format(filename))
            for key, val in meta.items():
                setattr(self, key, val)
            # define acquisition start and end time; Currently both are set to the acquisition start time,
            # which is contained in the filename.
            # Time will only be correct if the full scene was processed, start and end time of a subset will
            # differ. Thus, accurately setting both is not seen as too relevant.
            self.from_dt = strftime('%Y-%m-%dT%H:%M:%S', strptime(self.start, '%Y%m%dT%H%M%S'))
            self.to_dt = strftime('%Y-%m-%dT%H:%M:%S', strptime(self.start, '%Y%m%dT%H%M%S'))
            # match the sensor ID from the filename to a platform and instrument
            if self.sensor not in sensor_lookup.keys():
                raise ValueError('unknown sensor: {}'.format(self.sensor))
            self.platform, self.instrument = sensor_lookup[self.sensor]
            # extract general geo metadata from the GTiff information
            with Raster(filename) as ras:
                self.dtype = Dtype(ras.dtype).numpystr
                self.nodata = ras.nodata
                self.format = ras.format
                self.xres, self.yres = ras.res
                self.crs = 'EPSG:{}'.format(ras.epsg)
                self.is_projected = ras.projcs is not None
                self.extent = self.__extent_convert(ras.geo, 'x', 'y')
                # reproject the raster bounding box to EPSG 4326 and store its extent
                with ras.bbox() as bbox:
                    bbox.reproject(4326)
                    self.extent_4326 = self.__extent_convert(bbox.extent, 'lon', 'lat')
            # create dictionary for resolution metadata depending on CRS characteristics
            resolution_keys = ('x', 'y') if self.is_projected else ('longitude', 'latitude')
            self.resolution = dict(zip(resolution_keys, (self.xres, self.yres)))
            # check whether the data type is supported
            pattern = '(?:(?:u|)int(?:8|16|32|64)|float(?:32|64))'
            if not re.search(pattern, self.dtype):
                raise ValueError('unsupported data type {}'.format(self.dtype))
            # determine the dataset units; a str is used as-is, a dict is expected
            # to map the polarization to its unit (the former no-op branch
            # 'units = units' for str input has been removed)
            if isinstance(units, dict):
                try:
                    units = units[self.polarization]
                except KeyError:
                    raise KeyError("parameter 'units' does not contain key '{}'".format(self.polarization))
            elif not isinstance(units, str):
                raise TypeError("parameter 'units' must be of type str or dict")
            # create the measurement entry from collected metadata;
            # this is intended for easy access by class Product
            self.measurements = {self.polarization: {'dtype': self.dtype,
                                                     'name': self.polarization,
                                                     'nodata': self.nodata,
                                                     'filename': filename,
                                                     'units': units}}
        else:
            raise TypeError('filename must be of type str, list or Dataset')
    
    def __add__(self, dataset):
        """
        override the + operator. This is intended to easily combine two Dataset objects, which were
        created from different files belonging to the same measurement, e.g. two GeoTIFFs with one polarization
        each.

        Parameters
        ----------
        dataset: Dataset
            the dataset to add to the current one

        Returns
        -------
        Dataset
            the combination of the two
        """
        # the two datasets must agree on all spatial and acquisition attributes
        for attr in ['extent', 'crs', 'sensor', 'acquisition_mode', 'proc_steps', 'outname_base']:
            if getattr(self, attr) != getattr(dataset, attr):
                raise ValueError('value mismatch: {}'.format(attr))
        # self.filename.append(dataset.filename)
        for key in dataset.measurements.keys():
            if key in self.measurements.keys():
                raise RuntimeError('only different measurements can be combined to one dataset')
        self.measurements.update(dataset.measurements)
        return self
    
    def __radd__(self, dataset):
        """
        similar to :meth:`Dataset.__add__` but for function :func:`sum`, e.g. :code:`sum([Dataset1, Dataset2])`

        Parameters
        ----------
        dataset: Dataset
            the dataset to add to the current one

        Returns
        -------
        Dataset
            the combination of the two
        """
        # sum() starts with the integer 0; simply return self in that case
        if dataset == 0:
            return self
        else:
            return self.__add__(dataset)
    
    @staticmethod
    def __extent_convert(extent, xkey, ykey):
        """
        convert the extent of a :class:`~spatialist.raster.Raster` object to a
        datacube-compliant dictionary.

        Parameters
        ----------
        extent: dict
            the extent as returned by a :class:`~spatialist.raster.Raster` object
        xkey: {'longitude', 'x'}
            the key of the x dimension
        ykey: {'latitude', 'y'}
            the key of the y dimension

        Returns
        -------
        dict
            a dictionary with keys ``ll``, ``lr``, ``ul`` and ``ur``
        """
        return {'ll': {xkey: extent['xmin'],
                       ykey: extent['ymin']},
                'lr': {xkey: extent['xmax'],
                       ykey: extent['ymin']},
                'ul': {xkey: extent['xmin'],
                       ykey: extent['ymax']},
                'ur': {xkey: extent['xmax'],
                       ykey: extent['ymax']}}
    
    def __enter__(self):
        return self
    
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
    
    def __get_measurement_attr(self, attr):
        """
        get a certain measurement attribute from all measurements

        Parameters
        ----------
        attr: str
            the attribute to get

        Returns
        -------
        dict
            a dictionary with the measurement names as keys and the respective attribute as value
        """
        return dict([(key, self.measurements[key][attr]) for key in self.measurements.keys()])
    
    @property
    def filenames(self):
        """
        Returns
        -------
        dict
            all file names registered in the dataset
        """
        return self.__get_measurement_attr('filename')
    
    @property
    def identifier(self):
        """
        Returns
        -------
        str
            a unique dataset identifier built from the base name and the processing steps
        """
        return '{}_{}'.format(self.outname_base, '_'.join(self.proc_steps))
    
    @property
    def units(self):
        """
        Returns
        -------
        dict
            all measurement unit names registered in the dataset
        """
        return self.__get_measurement_attr('units')
    
    @units.setter
    def units(self, value):
        """
        (re)set the units of all measurements

        Parameters
        ----------
        value: str or dict
            the unit(s) to be set; if multiple measurements are present,
            a dictionary with measurement names as keys needs to be defined

        Returns
        -------

        """
        keys = list(self.measurements.keys())
        if isinstance(value, str):
            if len(keys) == 1:
                self.measurements[keys[0]]['units'] = value
            else:
                raise TypeError('the dataset contains multiple measurements; '
                                'in this case a dictionary is needed for setting the measurement units')
        elif isinstance(value, dict):
            # note: units of valid keys are applied before an invalid key raises
            for name, unit in value.items():
                if name in keys:
                    self.measurements[name]['units'] = unit
                else:
                    raise KeyError("the dataset does not contain a measurement '{}'".format(name))
    
    def close(self):
        return
class Product(object):
    """
    A class for describing an ODC product definition

    Parameters
    ----------
    definition: str, list, None
        the source of the product definition; either an existing product YML, a list of :class:`Dataset` objects,
        or None. In the latter case the product is defined using the parameters
        `name`, `product_type` and `description`.
    name: str
        the name of the product in the data cube
    product_type: str
        the type of measurement defined in the product, e.g. `gamma0`
    description: str
        the description of the product and its measurements
    """
    
    def __init__(self, definition=None, name=None, product_type=None,
                 description=None):
        missing_message = "when initializing {}, parameters " \
                          "'name', 'product_type' and 'description' must be defined"
        if isinstance(definition, str):
            # read an existing product definition YML
            if os.path.isfile(definition):
                with open(definition, 'r') as yml:
                    try:
                        self.meta = yaml.load(yml, Loader=yaml.FullLoader)
                    except yaml.YAMLError:
                        raise RuntimeError('the provided file does not seem to be a YAML file')
            else:
                raise RuntimeError('definition file does not exist')
        elif isinstance(definition, list):
            if None in [name, product_type, description]:
                # bug fix: the format argument used to carry a stray leading blank
                raise ValueError(missing_message.format('a product from list'))
            self.__initialize(name, product_type, description)
            for dataset in definition:
                with Dataset(dataset) as DS:
                    self.add(DS)
        elif definition is None:
            if None in [name, product_type, description]:
                raise ValueError(missing_message.format('a blank product'))
            self.__initialize(name, product_type, description)
        else:
            raise TypeError('type of parameter definition must be either str, list or None')
    
    def __enter__(self):
        return self
    
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
    
    def __str__(self):
        # the YAML representation of the product metadata
        return yaml.dump(self.meta, default_flow_style=False)
    
    def __getattr__(self, item):
        # expose selected entries of self.meta as virtual object attributes;
        # only called when regular attribute lookup fails
        if item in self.__fixture_storage:
            return self.meta['storage'][item]
        elif item in self.__fixture_metadata:
            subkey = 'code' if item == 'platform' else 'name'
            return self.meta['metadata'][item][subkey]
        elif item == 'product_type':
            return self.meta['metadata']['product_type']
        else:
            return object.__getattribute__(self, item)
    
    def __setattr__(self, key, value):
        # route the virtual attributes exposed by __getattr__ back into self.meta
        if key in self.__fixture_storage:
            self.meta['storage'][key] = value
        elif key in self.__fixture_metadata:
            subkey = 'code' if key == 'platform' else 'name'
            self.meta['metadata'][key][subkey] = value
        elif key == 'product_type':
            self.meta['metadata']['product_type'] = value
        else:
            super(Product, self).__setattr__(key, value)
    
    def close(self):
        return
    
    def __add_measurement(self, name, dtype, nodata, units):
        """
        create a new measurement entry

        Parameters
        ----------
        name: str
            the measurement name
        dtype: str
            the data type, e.g. float32
        nodata: int or float
            the nodata value of the data
        units: str
            the measurement units

        Returns
        -------

        """
        if name in self.measurements.keys():
            raise IndexError('measurement {} already exists'.format(name))
        self.meta['measurements'].append({'name': name,
                                          'dtype': dtype,
                                          'units': units,
                                          'nodata': nodata})
    
    def __initialize(self, name, product_type, description):
        """
        create a new blank product

        Parameters
        ----------
        name: str
            the name of the product
        product_type: str
            the product type, e.g. `gamma0`
        description: str
            a description of the product content/purpose

        Returns
        -------

        """
        self.meta = {'description': description,
                     'measurements': [],
                     'metadata': {'platform': {'code': None},
                                  'instrument': {'name': None},
                                  'format': {'name': None},
                                  'product_type': product_type},
                     'metadata_type': 'eo',
                     'name': name,
                     'storage': {'crs': None,
                                 'resolution': None}}
    
    @staticmethod
    def __check_dict_keys(keys, reference):
        # True if all entries of `keys` are contained in `reference`
        return len(union(keys, reference)) == len(keys)
    
    @property
    def __fixture_fields(self):
        """
        Returns
        -------
        list
            the names of the top-level metadata fields, which must be defined
        """
        return ['description', 'measurements', 'metadata', 'metadata_type', 'name', 'storage']
    
    @property
    def __fixture_measurement(self):
        """
        Returns
        -------
        list
            the names of the metadata fields, which must be defined for all measurements
        """
        return ['dtype', 'nodata', 'units']
    
    @property
    def __fixture_metadata(self):
        """
        Returns
        -------
        list
            the names of the metadata fields, which must be defined in the general metadata section
        """
        return ['format', 'instrument', 'platform']
    
    @property
    def __fixture_storage(self):
        """
        Returns
        -------
        list
            the names of the metadata fields, which must be defined for the storage section
        """
        return ['crs', 'resolution']
    
    def __validate(self):
        """
        assert whether the Product is valid

        Returns
        -------

        Raises
        ------
        RuntimeError
        """
        try:
            assert isinstance(self.meta, dict)
            assert self.__check_dict_keys(self.__fixture_fields, self.meta.keys())
            assert 'product_type' in self.meta['metadata'].keys()
            for measurement in self.meta['measurements']:
                assert self.__check_dict_keys(self.__fixture_measurement, measurement.keys())
        except AssertionError as e:
            log.info(e)
            raise RuntimeError('product invalid')
    
    def add(self, dataset):
        """
        Add a dataset to the abstracted product description. This first performs a check
        whether the dataset is compatible with the product and its already existing measurements.
        If a measurement in the dataset does not yet exist in the product description it is added.

        Parameters
        ----------
        dataset: Dataset
            the dataset whose description is to be added

        Returns
        -------

        """
        if not isinstance(dataset, Dataset):
            raise TypeError('input must be of type pyroSAR.datacube.Dataset')
        self.check_integrity(dataset, allow_new_measurements=True)
        # set the general product definition attributes if they are None
        for attr in self.__fixture_metadata + self.__fixture_storage:
            if getattr(self, attr) is None:
                setattr(self, attr, getattr(dataset, attr))
        # if it is not yet present, add the dataset measurement definition to that of the product
        for measurement, content in dataset.measurements.items():
            if measurement not in self.measurements.keys():
                self.__add_measurement(dtype=content['dtype'],
                                       name=content['name'],
                                       nodata=content['nodata'],
                                       units=content['units'])
    
    def check_integrity(self, dataset, allow_new_measurements=False):
        """
        check if a dataset is compatible with the product definition.

        Parameters
        ----------
        dataset: Dataset
            the dataset to be checked
        allow_new_measurements: bool
            allow new measurements to be added to the product definition?
            If not and the dataset contains measurements,
            which are not defined in the product, an error is raised.

        Returns
        -------

        Raises
        ------
        RuntimeError
        """
        # check general metadata and storage fields
        for attr in self.__fixture_metadata + self.__fixture_storage:
            val_ds = getattr(dataset, attr)
            val_prod = getattr(self, attr)
            if val_prod is not None and val_ds != val_prod:
                raise RuntimeError("mismatch of attribute '{0}': {1}, {2}".format(attr, val_ds, val_prod))
        # check measurement fields
        for measurement, content in dataset.measurements.items():
            if measurement not in self.measurements.keys():
                if not allow_new_measurements:
                    raise RuntimeError("measurement '{}' is not present in the product definition "
                                       "and allow_new_measurements is set to False".format(measurement))
            else:
                match = self.measurements[measurement]
                for attr in self.__fixture_measurement:
                    if match[attr] != content[attr]:
                        raise RuntimeError("mismatch of measurement '{0}', "
                                           "attribute '{1}': {2}, {3}".
                                           format(measurement, attr, match[attr], content[attr]))
    
    def export_indexing_yml(self, dataset, outdir):
        """
        Write a YML file named {:meth:`Dataset.identifier`}_dcindex.yml, which can be used for indexing a dataset in
        an Open Data Cube. The file will contain information from the product and the dataset and a test is first
        performed to check whether the dataset matches the product definition.
        A unique ID is issued using :func:`uuid.uuid4()`.

        Parameters
        ----------
        dataset: Dataset
            the dataset for which to export a file for
        outdir: str
            the directory to write the file to

        Returns
        -------

        """
        self.__validate()
        outname = os.path.join(outdir, dataset.identifier + '_dcindex.yml')
        if os.path.isfile(outname):
            raise RuntimeError('indexing YML already exists: \n {}'.format(outname))
        if not os.path.isdir(outdir):
            os.makedirs(outdir)
        self.check_integrity(dataset)
        out = {'id': str(uuid.uuid4()),
               'image': {'bands': {}},
               'grid_spatial': {'projection': {}},
               'extent': {'coord': {}},
               'lineage': {'source_datasets': {}}}
        for measurement, content in dataset.measurements.items():
            out['image']['bands'][measurement] = {'path': content['filename']}
        for attr in self.__fixture_metadata:
            subkey = 'code' if attr == 'platform' else 'name'
            out[attr] = {subkey: getattr(dataset, attr)}
        out['grid_spatial']['projection']['geo_ref_points'] = dataset.extent
        out['grid_spatial']['projection']['spatial_reference'] = dataset.crs
        out['extent']['coord'] = dataset.extent_4326
        out['extent']['from_dt'] = dataset.from_dt
        out['extent']['to_dt'] = dataset.to_dt
        out['product_type'] = self.meta['metadata']['product_type']
        with open(outname, 'w') as yml:
            yaml.dump(out, yml, default_flow_style=False)
    
    def export_ingestion_yml(self, outname, product_name, ingest_location, chunking):
        """
        Write a YML file, which can be used for ingesting indexed datasets into an Open Data Cube.

        Parameters
        ----------
        outname: str
            the name of the YML file to write
        product_name: str
            the name of the product in the ODC
        ingest_location: str
            the location of the ingested NetCDF files
        chunking: dict
            a dictionary with keys 'x', 'y' and 'time'; determines the size of the netCDF
            files ingested into the datacube; e.g. {'x': 512, 'y': 512, 'time': 1}

        Returns
        -------

        """
        from copy import deepcopy
        if os.path.isfile(outname):
            raise RuntimeError('product definition YML already exists: \n {}'.format(outname))
        self.__validate()
        if product_name == self.meta['name']:
            raise ValueError('source and target product names must be different')
        outdir = os.path.dirname(outname)
        # bug fix: only create a directory if outname actually contains one
        if outdir and not os.path.isdir(outdir):
            os.makedirs(outdir)
        file_path_template = '{0}/{1}_{2}_{3}_{4}_' \
                             '{{tile_index[0]}}_' \
                             '{{tile_index[1]}}_' \
                             '{{start_time}}.nc'.format(product_name,
                                                        self.platform,
                                                        self.instrument,
                                                        self.product_type,
                                                        self.crs.replace('EPSG:', ''))
        # bug fix: key was misspelled 'achknowledgment'
        global_attributes = {'instrument': self.instrument,
                             'platform': self.platform,
                             'institution': 'ESA',
                             'acknowledgment': 'Sentinel-1 data is provided by the European Space Agency '
                                               'on behalf of the European Commission via download.'}
        # bug fix: work on deep copies so that the product metadata (self.meta)
        # is not modified in place by the ingestion-specific additions
        storage = deepcopy(self.meta['storage'])
        storage['driver'] = 'NetCDF CF'
        storage['tile_size'] = {}
        storage['tile_size']['x'] = storage['resolution']['x'] * chunking['x']
        storage['tile_size']['y'] = storage['resolution']['y'] * chunking['y']
        storage['chunking'] = chunking
        storage['dimension_order'] = ['time', 'y', 'x']
        measurements = deepcopy(self.meta['measurements'])
        for measurement in measurements:
            measurement['resampling_method'] = 'nearest'
            measurement['src_varname'] = measurement['name']
        out = {'source_type': self.meta['name'],
               'output_type': product_name,
               'description': self.meta['description'],
               'location': ingest_location,
               'file_path_template': file_path_template,
               'storage': storage,
               'measurements': measurements,
               'global_attributes': global_attributes}
        with open(outname, 'w') as yml:
            yaml.dump(out, yml, default_flow_style=False)
    
    @property
    def measurements(self):
        """
        Returns
        -------
        dict of dict
            a dictionary with measurement names as keys
        """
        return dict([(x['name'], x) for x in self.meta['measurements']])
    
    def write(self, ymlfile):
        """
        write the product definition to a YML file

        Parameters
        ----------
        ymlfile: str
            the file to write to

        Returns
        -------

        """
        if os.path.isfile(ymlfile):
            raise RuntimeError('ingestion YML already exists: \n {}'.format(ymlfile))
        self.__validate()
        with open(ymlfile, 'w') as yml:
            yaml.dump(self.meta, yml, default_flow_style=False)
================================================
FILE: pyroSAR/drivers.py
================================================
###############################################################################
# Reading and Organizing system for SAR images
# Copyright (c) 2016-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
"""
This is the core module of package pyroSAR.
It contains the drivers for the different SAR image formats and offers
functionality for retrieving metadata, unpacking images, downloading ancillary files like DEMs and
Orbit State Vector files as well as archiving scenes in a database.
The :class:`ID` class and its subclasses allow easy and standardized access to the metadata of
images from different SAR sensors.
"""
from __future__ import annotations
from builtins import str
from io import BytesIO
import abc
import ast
import math
import os
import re
import shutil
import struct
import operator
import tarfile as tf
import xml.etree.ElementTree as ET
import zipfile as zf
from datetime import datetime, timezone, timedelta
from dateutil.parser import parse as dateparse
from time import strptime, strftime
from statistics import mean, median
from itertools import groupby
from PIL import Image
import progressbar as pb
from osgeo import gdal, osr, ogr
from osgeo.gdalconst import GA_ReadOnly
import numpy as np
from . import S1, patterns
from .config import __LOCAL__
from .ERS import passdb_query, get_resolution_nesz
from .xml_util import getNamespaces
from spatialist import crsConvert, Vector, bbox
from spatialist.ancillary import parse_literal, finder, multicore
import logging
log = logging.getLogger(__name__)
def identify(scene):
    """
    identify a SAR scene and return the appropriate metadata handler object

    Parameters
    ----------
    scene: str
        a file or directory name

    Returns
    -------
    pyroSAR.drivers.ID
        a pyroSAR metadata handler

    Examples
    --------
    >>> from pyroSAR import identify
    >>> filename = 'S1A_IW_GRDH_1SDV_20180829T170656_20180829T170721_023464_028DE0_F7BD.zip'
    >>> scene = identify(filename)
    >>> print(scene)
    pyroSAR ID object of type SAFE
    acquisition_mode: IW
    cycleNumber: 148
    frameNumber: 167392
    lines: 16703
    orbit: A
    orbitNumber_abs: 23464
    orbitNumber_rel: 117
    polarizations: ['VV', 'VH']
    product: GRD
    projection: +proj=longlat +datum=WGS84 +no_defs
    samples: 26056
    sensor: S1A
    spacing: (10.0, 10.0)
    start: 20180829T170656
    stop: 20180829T170721
    """
    if not os.path.exists(scene):
        raise OSError("No such file or directory: '{}'".format(scene))
    
    def all_subclasses(base):
        # iteratively collect all direct and indirect subclasses of `base`
        collected = set()
        queue = list(base.__subclasses__())
        while queue:
            candidate = queue.pop()
            if candidate not in collected:
                collected.add(candidate)
                queue.extend(candidate.__subclasses__())
        return list(collected)
    
    # try each driver in turn; the first one accepting the scene wins
    for handler in all_subclasses(ID):
        try:
            return handler(scene)
        except Exception:
            pass
    raise RuntimeError('data format not supported')
def identify_many(scenes, pbar=False, sortkey=None, cores=1):
    """
    wrapper function for returning metadata handlers of all valid scenes in a list,
    similar to function :func:`~pyroSAR.drivers.identify`.

    Parameters
    ----------
    scenes: list[str or ID]
        the file names of the scenes to be identified
    pbar: bool
        adds a progressbar if True
    sortkey: str or None
        sort the handler object list by an attribute
    cores: int
        the number of cores to parallelize identification

    Returns
    -------
    list[ID]
        a list of pyroSAR metadata handlers

    Examples
    --------
    >>> from pyroSAR import identify_many
    >>> files = finder('/path', ['S1*.zip'])
    >>> ids = identify_many(files, pbar=False, sortkey='start')
    """
    
    def handler(scene):
        # return existing handlers unchanged; otherwise try identification,
        # returning None for scenes that cannot be identified or read
        if isinstance(scene, ID):
            return scene
        try:
            return identify(scene)
        except RuntimeError:
            return None
        except PermissionError:
            log.warning("Permission denied: '{}'".format(scene))
            return None
    
    if cores == 1:
        progress = pb.ProgressBar(max_value=len(scenes)).start() if pbar else None
        idlist = []
        for i, scene in enumerate(scenes):
            idlist.append(handler(scene))
            if progress is not None:
                progress.update(i + 1)
        if progress is not None:
            progress.finish()
    else:
        idlist = multicore(function=handler, multiargs={'scene': scenes},
                           pbar=pbar, cores=cores)
    # bug fix: drop unidentifiable scenes (None entries) BEFORE sorting;
    # previously attrgetter crashed on None when sortkey was set and a scene failed
    idlist = list(filter(None, idlist))
    if sortkey is not None:
        idlist.sort(key=operator.attrgetter(sortkey))
    return idlist
def filter_processed(scenelist, outdir, recursive=False):
    """
    Filter a list of pyroSAR objects to those that have not yet been processed and stored in the defined directory.
    The search for processed scenes is either done in the directory only or recursively into subdirectories.
    The scenes must have been processed with pyroSAR in order to follow the right naming scheme.

    Parameters
    ----------
    scenelist: list[ID]
        a list of pyroSAR objects
    outdir: str
        the processing directory
    recursive: bool
        scan `outdir` recursively into subdirectories?

    Returns
    -------
    list[ID]
        a list of those scenes, which have not been processed yet
    """
    remaining = []
    for scene in scenelist:
        if not scene.is_processed(outdir, recursive):
            remaining.append(scene)
    return remaining
class ID(object):
"""
Abstract class for SAR meta data handlers
"""
def __init__(self, metadict):
"""
to be called by the __init__methods of the format drivers
scans a metadata dictionary and registers entries with a standardized name as object attributes
see __LOCAL__ for standard names. It must be ensured that each of these is actually read by the individual SAR format driver.
:param metadict: a dictionary containing the metadata attributes of a SAR scene
"""
self.locals = __LOCAL__
for item in self.locals:
setattr(self, item, metadict[item])
    def __getattr__(self, item):
        # only invoked when regular attribute lookup fails; re-raise as a clean AttributeError
        raise AttributeError("object has no attribute '{}'".format(item))
def __str__(self):
lines = ['pyroSAR ID object of type {}'.format(self.__class__.__name__)]
for item in sorted(self.locals):
value = getattr(self, item)
if item == 'projection':
value = crsConvert(value, 'proj4') if value is not None else None
if value == -1:
value = ''
line = '{0}: {1}'.format(item, value)
lines.append(line)
return '\n'.join(lines)
def bbox(self, outname=None, driver=None, overwrite=True, buffer=None):
"""
get the bounding box of a scene. The result is either returned as
vector object or written to a file.
Parameters
----------
outname: str
the name of the vector file to be written
driver: str
the output file format; needs to be defined if the format cannot
be auto-detected from the filename extension
overwrite: bool
overwrite an existing vector file?
buffer: None or int or float or tuple[int or float]
a buffer to add around `coordinates`. Default None: do not add
a buffer. A tuple is interpreted as (x buffer, y buffer).
Returns
-------
~spatialist.vector.Vector or None
the vector object if `outname` is None and None otherwise
See Also
--------
spatialist.vector.Vector.bbox
"""
if outname is None:
return bbox(coordinates=self.getCorners(), crs=self.projection,
buffer=buffer)
else:
bbox(coordinates=self.getCorners(), crs=self.projection,
outname=outname, driver=driver, overwrite=overwrite,
buffer=buffer)
    def geometry(self, outname=None, driver=None, overwrite=True):
        """
        get the footprint geometry of a scene either as a vector object or written to a file

        Parameters
        ----------
        outname: str
            the name of the vector file to be written
        driver: str
            the output file format; needs to be defined if the format cannot
            be auto-detected from the filename extension
        overwrite: bool
            overwrite an existing vector file?

        Returns
        -------
        ~spatialist.vector.Vector or None
            the vector object if `outname` is None, None otherwise

        See also
        --------
        spatialist.vector.Vector.write
        """
        if 'coordinates' not in self.meta.keys():
            raise NotImplementedError
        srs = crsConvert(self.projection, 'osr')
        # collect the scene's corner/tie-point coordinates as a multipoint geometry
        points = ogr.Geometry(ogr.wkbMultiPoint)
        for lon, lat in self.meta['coordinates']:
            point = ogr.Geometry(ogr.wkbPoint)
            point.AddPoint(lon, lat)
            points.AddGeometry(point)
        # the footprint is the convex hull of all coordinate points
        geom = points.ConvexHull()
        geom.FlattenTo2D()
        point = points = None
        # rebuild the exterior ring in reversed vertex order if it is clockwise,
        # i.e. enforce counter-clockwise winding
        exterior = geom.GetGeometryRef(0)
        if exterior.IsClockwise():
            points = list(exterior.GetPoints())
            exterior.Empty()
            for x, y in reversed(points):
                exterior.AddPoint(x, y)
            geom.CloseRings()
        exterior = points = None
        # wrap the geometry in an in-memory vector layer with an 'area' attribute
        bbox = Vector(driver='MEM')
        bbox.addlayer('geometry', srs, geom.GetGeometryType())
        bbox.addfield('area', ogr.OFTReal)
        bbox.addfeature(geom, fields={'area': geom.Area()})
        geom = None
        if outname is None:
            return bbox
        else:
            bbox.write(outfile=outname, driver=driver, overwrite=overwrite)
@property
def compression(self):
"""
check whether a scene is compressed into an tarfile or zipfile or not at all
Returns
-------
str or None
either 'zip', 'tar' or None
"""
if os.path.isdir(self.scene):
return None
elif zf.is_zipfile(self.scene):
return 'zip'
elif tf.is_tarfile(self.scene):
return 'tar'
else:
return None
def export2dict(self):
"""
Return the uuid and the metadata that is defined in `self.locals` as a dictionary
"""
metadata = {item: self.meta[item] for item in self.locals}
sq_file = os.path.basename(self.file)
title = os.path.splitext(sq_file)[0]
metadata['uuid'] = title
return metadata
def examine(self, include_folders=False):
"""
check whether any items in the SAR scene structure (i.e. files/folders) match the regular expression pattern
defined by the class. On success the item is registered in the object as attribute `file`.
Parameters
----------
include_folders: bool
also match folder (or just files)?
Returns
-------
Raises
-------
RuntimeError
"""
files = self.findfiles(self.pattern, include_folders=include_folders)
if len(files) == 1:
self.file = files[0]
elif len(files) == 0:
raise RuntimeError('scene does not match {} naming convention'.format(type(self).__name__))
else:
raise RuntimeError('file ambiguity detected:\n{}'.format('\n'.join(files)))
def findfiles(self, pattern, include_folders=False):
"""
find files in the scene archive, which match a pattern.
Parameters
----------
pattern: str
the regular expression to match
include_folders: bool
also match folders (or just files)?
Returns
-------
list[str]
the matched file names
See Also
--------
:func:`spatialist.ancillary.finder`
"""
foldermode = 1 if include_folders else 0
try:
files = finder(target=self.scene, matchlist=[pattern],
foldermode=foldermode, regex=True)
except RuntimeError:
# Return the scene if only a file and not zip
return self.scene
if os.path.isdir(self.scene) \
and re.search(pattern, os.path.basename(self.scene)) \
and include_folders:
files.append(self.scene)
return files
    def gdalinfo(self):
        """
        read metadata directly from the GDAL SAR image drivers

        Returns
        -------
        dict
            the metadata attributes
        """
        # search for a recognizable metadata header file within the scene
        files = self.findfiles(r'(?:\.[NE][12]$|DAT_01\.001$|product\.xml|manifest\.safe$)')
        # If only one file return the file in array
        if isinstance(files, str):
            files = [files]
        if len(files) == 1:
            # choose the GDAL virtual file system prefix matching the archive type
            prefix = {'zip': '/vsizip/', 'tar': '/vsitar/', None: ''}[self.compression]
            header = files[0]
        elif len(files) > 1:
            raise RuntimeError('file ambiguity detected')
        else:
            raise RuntimeError('file type not supported')
        meta = {}
        # derive the sensor name from the header file extension where possible
        ext_lookup = {'.N1': 'ASAR', '.E1': 'ERS1', '.E2': 'ERS2'}
        extension = os.path.splitext(header)[1]
        if extension in ext_lookup:
            meta['sensor'] = ext_lookup[extension]
        # keep the full gdal.Info report under a separate key
        info = gdal.Info(prefix + header, options=gdal.InfoOptions(allMetadata=True, format='json'))
        meta['extra'] = info
        img = gdal.Open(prefix + header, GA_ReadOnly)
        gdalmeta = img.GetMetadata()
        meta['samples'], meta['lines'], meta['bands'] = img.RasterXSize, img.RasterYSize, img.RasterCount
        meta['projection'] = img.GetGCPProjection()
        meta['gcps'] = [((x.GCPPixel, x.GCPLine), (x.GCPX, x.GCPY, x.GCPZ)) for x in img.GetGCPs()]
        # release the GDAL dataset handle
        img = None
        for item in gdalmeta:
            entry = [item, parse_literal(gdalmeta[item].strip())]
            try:
                # normalize any date-like value to the common format
                entry[1] = self.parse_date(str(entry[1]))
            except ValueError:
                pass
            if re.search('LAT|LONG', entry[0]):
                # coordinate values are scaled by 1e6 in the driver metadata
                # (presumably microdegrees) — convert to degrees
                entry[1] /= 1000000.
            meta[entry[0]] = entry[1]
        return meta
def getCorners(self):
"""
Get the bounding box corner coordinates
Returns
-------
dict
the corner coordinates as a dictionary with keys `xmin`, `ymin`, `xmax`, `ymax`
"""
if 'coordinates' not in self.meta.keys():
raise NotImplementedError
coordinates = self.meta['coordinates']
lat = [x[1] for x in coordinates]
lon = [x[0] for x in coordinates]
return {'xmin': min(lon), 'xmax': max(lon), 'ymin': min(lat), 'ymax': max(lat)}
    def getFileObj(self, filename):
        """
        Load a file into a readable file object.

        Parameters
        ----------
        filename: str
            the name of a file in the scene archive, easiest to get with method :meth:`~ID.findfiles`

        Returns
        -------
        io.BytesIO
            a file pointer object
        """
        # delegates to the module-level getFileObj function, which handles
        # plain directories as well as zip and tar archives
        return getFileObj(self.scene, filename)
def getGammaImages(self, directory=None):
"""
list all files processed by GAMMA
Parameters
----------
directory: str or None
the directory to be scanned; if left empty the object attribute `gammadir` is scanned
Returns
-------
list[str]
the file names of the images processed by GAMMA
Raises
-------
RuntimeError
"""
if directory is None:
if hasattr(self, 'gammadir'):
directory = self.gammadir
else:
raise RuntimeError(
'directory missing; please provide directory to function or define object attribute "gammadir"')
return [x for x in finder(directory, [self.outname_base()], regex=True) if
not re.search(r'\.(?:par|hdr|aux\.xml|swp|sh)$', x)]
def getHGT(self):
"""
get the names of all SRTM HGT tiles overlapping with the SAR scene
Returns
-------
list[str]
names of the SRTM HGT tiles
"""
corners = self.getCorners()
# generate sequence of integer coordinates marking the tie points of the overlapping hgt tiles
lat = range(int(float(corners['ymin']) // 1), int(float(corners['ymax']) // 1) + 1)
lon = range(int(float(corners['xmin']) // 1), int(float(corners['xmax']) // 1) + 1)
# convert coordinates to string with leading zeros and hemisphere identification letter
lat = [str(x).zfill(2 + len(str(x)) - len(str(x).strip('-'))) for x in lat]
lat = [x.replace('-', 'S') if '-' in x else 'N' + x for x in lat]
lon = [str(x).zfill(3 + len(str(x)) - len(str(x).strip('-'))) for x in lon]
lon = [x.replace('-', 'W') if '-' in x else 'E' + x for x in lon]
# concatenate all formatted latitudes and longitudes with each other as final product
return [x + y + '.hgt' for x in lat for y in lon]
def is_processed(self, outdir, recursive=False):
    """
    check whether a scene has already been processed and stored in the defined output directory
    (and subdirectories if scanned recursively)

    Parameters
    ----------
    outdir: str
        the directory to be checked
    recursive: bool
        also scan subdirectories of `outdir`?

    Returns
    -------
    bool
        does an image matching the scene pattern exist?
    """
    # a non-existing directory trivially contains no processed results
    if not os.path.isdir(outdir):
        return False
    # '{}.*tif$'.format(self.outname_base())
    matches = finder(outdir, [self.outname_base()], regex=True, recursive=recursive)
    return len(matches) > 0
def outname_base(self, extensions=None):
    """
    parse a string containing basic information about the scene in standardized format.
    Currently, this ID contains the sensor (4 digits), acquisition mode (4 digits), orbit (1 digit)
    and acquisition start time (15 digits), e.g. `S1A__IW___A_20150523T122350`.

    Parameters
    ----------
    extensions: list[str] or None
        the names of additional object attributes to append to the basename,
        e.g. ``['orbitNumber_rel']``

    Returns
    -------
    str
        a standardized name unique to the scene
    """
    fields = ('{:_<4}'.format(self.sensor),
              '{:_<4}'.format(self.acquisition_mode),
              self.orbit,
              self.start)
    out = '_'.join(fields)
    # bug fix: the previous condition `len(extensions) is not None` was always
    # True (len returns an int), so an empty list produced a trailing
    # underscore; only append if at least one extension attribute is requested
    if isinstance(extensions, list) and len(extensions) > 0:
        ext = '_'.join([str(getattr(self, key)) for key in extensions])
        out += '_' + ext
    return out
@staticmethod
def parse_date(x):
    """
    this function gathers known time formats provided in the different SAR products
    and converts them to a common standard of the form YYYYMMDDTHHMMSS.

    Parameters
    ----------
    x: str
        the time stamp

    Returns
    -------
    str
        the converted time stamp in format YYYYmmddTHHMMSS
    """
    # delegate to the module-level parse_date helper
    converted = parse_date(x)
    return converted
@abc.abstractmethod
def quicklook(self, outname, format='kmz'):
    """
    export a quick look image of the scene.
    Abstract method; concrete handler classes provide the implementation.

    Parameters
    ----------
    outname: str
        the name of the output file
    format: str
        the format of the file to write;
        currently only kmz is supported

    Returns
    -------

    Examples
    --------
    >>> from pyroSAR import identify
    >>> scene = identify('S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip')
    >>> scene.quicklook('S1A__IW___A_20180101T170648.kmz')
    """
    raise NotImplementedError
@property
def start_dt(self) -> datetime:
    """
    Returns
    -------
    datetime
        the acquisition start time as timezone-aware datetime object (UTC)
    """
    # self.start is of format YYYYmmddTHHMMSS and carries no explicit time zone
    return datetime.strptime(self.start, '%Y%m%dT%H%M%S').replace(tzinfo=timezone.utc)
@property
def stop_dt(self) -> datetime:
    """
    Returns
    -------
    datetime
        the acquisition stop time as timezone-aware datetime object (UTC)
    """
    # self.stop is of format YYYYmmddTHHMMSS and carries no explicit time zone
    return datetime.strptime(self.stop, '%Y%m%dT%H%M%S').replace(tzinfo=timezone.utc)
def summary(self):
    """
    print the set of standardized scene metadata attributes

    Returns
    -------
    """
    # print(self) implicitly invokes self.__str__(), which assembles the listing
    print(self)
@abc.abstractmethod
def scanMetadata(self):
    """
    scan SAR scenes for metadata attributes.
    Abstract method; each format handler implements the actual scan.
    The returned dictionary is registered as attribute `meta` by the class upon object initialization.
    This dictionary furthermore needs to return a set of standardized attribute keys,
    which are directly registered as object attributes.

    Returns
    -------
    dict
        the derived attributes
    """
    raise NotImplementedError
@abc.abstractmethod
def unpack(self, directory, overwrite=False, exist_ok=False):
    """
    Unpack the SAR scene into a defined directory.
    Abstract method; concrete handlers typically delegate to :meth:`ID._unpack`.

    Parameters
    ----------
    directory: str
        the base directory into which the scene is unpacked
    overwrite: bool
        overwrite an existing unpacked scene?
    exist_ok: bool
        allow existing output files and do not create new ones?

    Returns
    -------
    """
    raise NotImplementedError
def _unpack(self, directory, offset=None, overwrite=False, exist_ok=False):
    """
    general function for unpacking scene archives; to be called by implementations of ID.unpack.
    Will reset object attributes `scene` and `file` to point to the locations of the unpacked scene

    Parameters
    ----------
    directory: str
        the name of the directory in which the files are written
    offset: str
        an archive directory offset; to be defined if only a subdirectory is to be unpacked (see e.g. TSX.unpack)
    overwrite: bool
        should an existing directory be overwritten?
    exist_ok: bool
        do not attempt unpacking if the target directory already exists? Ignored if ``overwrite==True``

    Returns
    -------
    """
    do_unpack = True
    if os.path.isdir(directory):
        if overwrite:
            shutil.rmtree(directory)
        else:
            if exist_ok:
                # keep the existing directory and skip extraction below
                do_unpack = False
            else:
                raise RuntimeError('target scene directory already exists: {}'.format(directory))
    os.makedirs(directory, exist_ok=True)

    if do_unpack:
        if tf.is_tarfile(self.scene):
            archive = tf.open(self.scene, 'r')
            names = archive.getnames()
            if offset is not None:
                # restrict extraction to members below the given subdirectory
                names = [x for x in names if x.startswith(offset)]
            header = os.path.commonprefix(names)
            if header in names:
                if archive.getmember(header).isdir():
                    # the archive wraps everything in a single top-level
                    # directory; extract its content directly into `directory`
                    # with the wrapper (and optional offset) stripped
                    for item in sorted(names):
                        if item != header:
                            member = archive.getmember(item)
                            if offset is not None:
                                member.name = member.name.replace(offset + '/', '')
                            archive.extract(member, directory)
                    archive.close()
                else:
                    archive.extractall(directory)
                    archive.close()
            # NOTE(review): if `header` is not itself a member, the archive is
            # neither extracted nor closed here -- confirm this is intended
        elif zf.is_zipfile(self.scene):
            archive = zf.ZipFile(self.scene, 'r')
            names = archive.namelist()
            header = os.path.commonprefix(names)

            if header.endswith('/'):
                # single top-level directory: write each member manually so the
                # wrapper directory is stripped from the output paths
                for item in sorted(names):
                    if item != header:
                        repl = item.replace(header, '', 1)
                        outname = os.path.join(directory, repl)
                        outname = outname.replace('/', os.path.sep)
                        if item.endswith('/'):
                            os.makedirs(outname, exist_ok=True)
                        else:
                            os.makedirs(os.path.dirname(outname), exist_ok=True)
                            try:
                                with open(outname, 'wb') as outfile:
                                    outfile.write(archive.read(item))
                            except zf.BadZipfile:
                                # best-effort: skip unreadable members
                                log.info('corrupt archive, unpacking failed')
                                continue
                archive.close()
            else:
                archive.extractall(directory)
                archive.close()
        else:
            log.info('unpacking is only supported for TAR and ZIP archives')
            return

    # repoint the object to the unpacked location; `file` falls back to the
    # directory itself if the original main file name does not exist there
    self.scene = directory
    main = os.path.join(self.scene, os.path.basename(self.file))
    self.file = main if os.path.isfile(main) else self.scene
class BEAM_DIMAP(ID):
    """
    Handler class for BEAM-DIMAP data

    Sensors:
        * SNAP supported sensors
    """

    def __init__(self, scene):
        # Parameters
        # ----------
        # scene: str
        #     the name of the BEAM-DIMAP metadata file (extension .dim)
        if not scene.lower().endswith('.dim'):
            raise RuntimeError('Scene format is not BEAM-DIMAP')
        # XML root of the .dim file; populated by scanMetadata
        self.root = None
        self.scene = scene
        self.meta = self.scanMetadata()
        # register the standardized meta attributes as object attributes
        super(BEAM_DIMAP, self).__init__(self.meta)

    def scanMetadata(self):
        """
        scan the BEAM-DIMAP XML metadata for the standardized attributes.

        Returns
        -------
        dict
            the derived attributes
        """
        meta = dict()
        self.root = ET.parse(self.scene).getroot()

        def get_by_name(attr: list[str] | str, section: str = 'Abstracted_Metadata') -> str:
            # read the text of an MDATTR element from a named MDElem section;
            # if a list of candidate names is passed, the first readable one wins
            msg = 'cannot get attribute "{}" from section "{}"'
            if isinstance(attr, list):
                for i, item in enumerate(attr):
                    try:
                        return get_by_name(item, section=section)
                    except RuntimeError:
                        continue
                raise RuntimeError(msg.format('|'.join(attr), section))
            else:
                element = self.root.find(f'.//MDElem[@name="{section}"]')
                out = element.find(f'.//MDATTR[@name="{attr}"]')
                # '99999'/'99999.0' serve as no-data placeholders in the metadata
                if out is None or out.text in ['99999', '99999.0']:
                    raise RuntimeError(msg.format(attr, section))
                return out.text

        # map mission identifiers in the metadata to pyroSAR sensor names
        missions = {'ENVISAT': 'ASAR',
                    'ERS1': 'ERS1',
                    'ERS2': 'ERS2',
                    'SENTINEL-1A': 'S1A',
                    'SENTINEL-1B': 'S1B',
                    'SENTINEL-1C': 'S1C',
                    'SENTINEL-1D': 'S1D'}
        section = 'Abstracted_Metadata'
        meta['sensor'] = missions[get_by_name('MISSION', section=section)]
        if re.search('S1[A-Z]', meta['sensor']):
            meta['acquisition_mode'] = get_by_name('ACQUISITION_MODE', section=section)
            meta['product'] = self.root.find('.//PRODUCT_TYPE').text
        elif meta['sensor'] in ['ASAR', 'ERS1', 'ERS2']:
            product_type = get_by_name('PRODUCT_TYPE', section=section)
            meta['acquisition_mode'] = product_type[4:7]
            # product overview table: https://doi.org/10.5167/UZH-96146
            if meta['acquisition_mode'] in ['APS', 'IMS', 'WSS']:
                meta['product'] = 'SLC'
            elif meta['acquisition_mode'] in ['APP', 'IMP']:
                meta['product'] = 'PRI'
            elif meta['acquisition_mode'] in ['APM', 'IMM', 'WSM']:
                meta['product'] = 'MR'
            else:
                raise RuntimeError(f"unsupported acquisition mode: '{meta['acquisition_mode']}'")
        else:
            raise RuntimeError('unknown sensor {}'.format(meta['sensor']))
        meta['IPF_version'] = get_by_name('Processing_system_identifier', section=section)
        # 'ASCENDING'/'DESCENDING' reduced to 'A'/'D'
        meta['orbit'] = get_by_name('PASS', section=section)[0]
        pols = [x.text for x in self.root.findall('.//MDATTR[@desc="Polarization"]')]
        pols = list(filter(None, pols))
        # drop combined entries such as 'VV-VH' and de-duplicate
        meta['polarizations'] = list(set([x for x in pols if '-' not in x]))
        meta['spacing'] = (round(float(get_by_name('range_spacing', section=section)), 6),
                           round(float(get_by_name('azimuth_spacing', section=section)), 6))
        meta['looks'] = (float(get_by_name('range_looks', section=section)),
                         float(get_by_name('azimuth_looks', section=section)))
        meta['samples'] = int(self.root.find('.//BAND_RASTER_WIDTH').text)
        meta['lines'] = int(self.root.find('.//BAND_RASTER_HEIGHT').text)
        meta['bands'] = int(self.root.find('.//NBANDS').text)
        meta['orbitNumber_abs'] = int(get_by_name('ABS_ORBIT', section=section))
        meta['orbitNumber_rel'] = int(get_by_name('REL_ORBIT', section=section))
        # attribute names differ between products, hence the candidate lists
        meta['cycleNumber'] = int(get_by_name(['orbit_cycle', 'CYCLE'], section=section))
        meta['frameNumber'] = int(get_by_name(['data_take_id', 'ABS_ORBIT'], section=section))
        meta['swath'] = get_by_name('SWATH', section=section)
        srgr = bool(int(get_by_name('srgr_flag', section=section)))
        meta['image_geometry'] = 'GROUND_RANGE' if srgr else 'SLANT_RANGE'
        #################################################################################
        # start, stop
        start = datetime.strptime(self.root.find('.//PRODUCT_SCENE_RASTER_START_TIME').text,
                                  '%d-%b-%Y %H:%M:%S.%f')
        meta['start'] = start.strftime('%Y%m%dT%H%M%S')
        stop = datetime.strptime(self.root.find('.//PRODUCT_SCENE_RASTER_STOP_TIME').text,
                                 '%d-%b-%Y %H:%M:%S.%f')
        meta['stop'] = stop.strftime('%Y%m%dT%H%M%S')
        #################################################################################
        # incident angle
        # the incident angle is not stored consistently so several options are tried;
        # the while-True construct is a single-pass fallback chain exited via break
        while True:
            # may be missing or set to '99999.0'
            try:
                inc_near = get_by_name('incidence_near', section=section)
                inc_far = get_by_name('incidence_far', section=section)
                incidence = (float(inc_near) + float(inc_far)) / 2
                break
            except RuntimeError:
                pass
            # this attribute might only apply to Sentinel-1
            inc_elements = self.root.findall('.//MDATTR[@name="incidenceAngleMidSwath"]')
            if len(inc_elements) > 0:
                incidence = [float(x.text) for x in inc_elements]
                incidence = mean(incidence)
                break
            # the tie point grids are no longer present in geocoded products
            inc_grid = os.path.join(self.scene.replace('.dim', '.data'),
                                    'tie_point_grids', 'incident_angle.img')
            if os.path.isfile(inc_grid):
                ras = gdal.Open(inc_grid)
                arr = ras.ReadAsArray()
                # mean over valid (non-zero) grid cells
                incidence = np.mean(arr[arr != 0])
                ras = arr = None
                break
            raise ValueError('cannot read the incident angle')
        meta['incidence'] = incidence
        #################################################################################
        # projection
        if self.root.find('.//WKT') is not None:
            meta['projection'] = self.root.find('.//WKT').text.lstrip()
        else:
            # no CRS definition present; assume geographic WGS84
            meta['projection'] = crsConvert(4326, 'wkt')
        #################################################################################
        # coordinates
        keys = ['{}_{}_{}'.format(a, b, c)
                for a in ['first', 'last']
                for b in ['far', 'near']
                for c in ['lat', 'long']]
        coords = {key: float(get_by_name(key, section=section))
                  for key in keys}
        # corner coordinates ordered as a closed ring of (lon, lat) pairs
        meta['coordinates'] = [(coords['first_near_long'], coords['first_near_lat']),
                               (coords['last_near_long'], coords['last_near_lat']),
                               (coords['last_far_long'], coords['last_far_lat']),
                               (coords['first_far_long'], coords['first_far_lat'])]
        #################################################################################
        return meta

    def unpack(self, directory, overwrite=False, exist_ok=False):
        # BEAM-DIMAP scenes consist of a .dim file plus a .data directory and
        # are never delivered as archives
        raise RuntimeError('unpacking of BEAM-DIMAP products is not supported')
class CEOS_ERS(ID):
    """
    Handler class for ERS data in CEOS format

    Sensors:
        * ERS1
        * ERS2

    Reference:
        ER-IS-EPO-GS-5902-3: Annex C. ERS SAR.SLC/SLC-I. CCT and EXABYTE
        (`ESA 1998 `_)
    """

    def __init__(self, scene):
        # Parameters
        # ----------
        # scene: str
        #     the name of the scene file or archive
        self.pattern = patterns.ceos_ers
        # NOTE(review): the named groups of this pattern appear to have been
        # stripped (each '(?P' should read '(?P<name>...'); scanMetadata below
        # requires a group 'image_mode' on the second alternative -- confirm
        # against version control before relying on this pattern
        self.pattern_pid = r'(?P(?:SAR|ASA))_' \
                           r'(?P(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_)))_' \
                           r'(?P[012B][CP])'
        self.scene = os.path.realpath(scene)
        self.examine()
        self.meta = self.scanMetadata()
        # register the standardized meta attributes as object attributes
        super(CEOS_ERS, self).__init__(self.meta)

    def unpack(self, directory, overwrite=False, exist_ok=False):
        # Unpack the scene archive; the '.PS' suffix of the main file is
        # stripped to derive the output directory name.
        if self.sensor in ['ERS1', 'ERS2']:
            base_file = re.sub(r'\.PS$', '', os.path.basename(self.file))
            base_dir = os.path.basename(directory.strip('/'))
            # avoid doubling the scene name if `directory` already carries it
            outdir = directory if base_file == base_dir else os.path.join(directory, base_file)
            self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)
        else:
            raise NotImplementedError('sensor {} not implemented yet'.format(self.sensor))

    def scanMetadata(self):
        # Derive the standardized metadata attributes by parsing the file name
        # and the binary CEOS leader file (LEA_01.001) at fixed byte offsets.
        meta = dict()

        match = re.match(re.compile(self.pattern), os.path.basename(self.file))
        match2 = re.match(re.compile(self.pattern_pid), match.group('product_id'))

        if re.search('IM__0', match.group('product_id')):
            raise RuntimeError('product level 0 not supported (yet)')

        meta['acquisition_mode'] = match2.group('image_mode')
        meta['product'] = 'SLC' if meta['acquisition_mode'] in ['IMS', 'APS', 'WSS'] else 'PRI'

        lea_obj = self.getFileObj(self.findfiles('LEA_01.001')[0])
        lea = lea_obj.read()
        lea_obj.close()
        # fixed-length CEOS records at known offsets
        fdr = lea[0:720]  # file descriptor record
        dss = lea[720:(720 + 1886)]  # data set summary record
        mpd = lea[(720 + 1886):(720 + 1886 + 1620)]  # map projection data record
        ppd_start = 720 + 1886 + 1620
        # record length is stored big-endian at offset 8 of the record
        ppd_length = struct.unpack('>i', lea[ppd_start + 8: ppd_start + 12])[0]
        # NOTE(review): the slice end looks like it should be
        # ppd_start + ppd_length; harmless at present because `ppd` is not
        # used further down -- confirm before fixing
        ppd = lea[ppd_start:ppd_length]  # platform position data record
        frd_start = 720 + 1886 + 1620 + ppd_length
        frd = lea[frd_start:(frd_start + 12288)]  # facility related data record

        meta['sensor'] = dss[396:412].strip().decode()
        meta['start'] = self.parse_date(str(dss[1814:1838].decode('utf-8')))
        meta['stop'] = self.parse_date(str(dss[1862:1886].decode('utf-8')))
        # ERS acquires VV only
        meta['polarizations'] = ['VV']
        looks_range = float(dss[1174:1190])
        looks_azimuth = float(dss[1190:1206])
        meta['looks'] = (looks_range, looks_azimuth)
        meta['heading'] = float(dss[468:476])
        # headings > 180 degrees indicate a descending pass
        meta['orbit'] = 'D' if meta['heading'] > 180 else 'A'
        orbitNumber, frameNumber = map(int, re.findall('[0-9]+', dss[36:68].decode('utf-8')))
        meta['orbitNumber_abs'] = orbitNumber
        meta['frameNumber'] = frameNumber
        # cycle and relative orbit are looked up from the pass database
        orbitInfo = passdb_query(meta['sensor'], datetime.strptime(meta['start'], '%Y%m%dT%H%M%S'))
        meta['cycleNumber'] = orbitInfo['cycleNumber']
        meta['orbitNumber_rel'] = orbitInfo['orbitNumber_rel']
        spacing_azimuth = float(dss[1686:1702])
        spacing_range = float(dss[1702:1718])
        meta['spacing'] = (spacing_range, spacing_azimuth)
        meta['incidence_angle'] = float(dss[484:492])
        meta['proc_facility'] = dss[1045:1061].strip().decode()
        meta['proc_system'] = dss[1061:1069].strip().decode()
        meta['proc_version'] = dss[1069:1077].strip().decode()
        meta['antenna_flag'] = int(frd[658:662])
        # calibration constant converted to dB
        meta['k_db'] = -10 * math.log(float(frd[662:678]), 10)
        meta['sc_db'] = {'ERS1': 59.61, 'ERS2': 60}[meta['sensor']]
        meta['samples'] = int(mpd[60:76])
        meta['lines'] = int(mpd[76:92])
        # corner coordinates as (lon, lat) pairs: UL, UR, LR, LL
        ul = (float(mpd[1088:1104]), float(mpd[1072:1088]))
        ur = (float(mpd[1120:1136]), float(mpd[1104:1120]))
        lr = (float(mpd[1152:1168]), float(mpd[1136:1152]))
        ll = (float(mpd[1184:1200]), float(mpd[1168:1184]))
        meta['coordinates'] = [ul, ur, lr, ll]
        meta['projection'] = crsConvert(4326, 'wkt')

        return meta

    # def correctAntennaPattern(self):
    # the following section is only relevant for PRI products and can be considered future work
    # select antenna gain correction lookup file from extracted meta information
    # the lookup files are stored in a subfolder CAL which is included in the pythonland software package
    # if sensor == 'ERS1':
    #     if date < 19950717:
    #         antenna = 'antenna_ERS1_x_x_19950716'
    #     else:
    #         if proc_sys == 'VMP':
    #             antenna = 'antenna_ERS2_VMP_v68_x' if proc_vrs >= 6.8 else 'antenna_ERS2_VMP_x_v67'
    #         elif proc_fac == 'UKPAF' and date < 19970121:
    #             antenna = 'antenna_ERS1_UKPAF_19950717_19970120'
    #         else:
    #             antenna = 'antenna_ERS1'
    # else:
    #     if proc_sys == 'VMP':
    #         antenna = 'antenna_ERS2_VMP_v68_x' if proc_vrs >= 6.8 else 'antenna_ERS2_VMP_x_v67'
    #     elif proc_fac == 'UKPAF' and date < 19970121:
    #         antenna = 'antenna_ERS2_UKPAF_x_19970120'
    #     else:
    #         antenna = 'antenna_ERS2'
class CEOS_PSR(ID):
    """
    Handler class for ALOS-PALSAR data in CEOS format

    Sensors:
        * PSR1
        * PSR2

    PALSAR-1:
        References:
            * NEB-01006: ALOS/PALSAR Level 1 Product Format Description (JAXA 2006)
            * NEB-070062B: ALOS/PALSAR Level 1.1/1.5 Product Format Description (JAXA 2009)
        Products / processing levels:
            * 1.0
            * 1.1
            * 1.5
        Acquisition modes (two letters, A and B):
            * A: supplemental remarks of the sensor type:
                * S: Wide observation mode
                * P: all other modes
            * B: observation mode
                * H: Fine mode
                * W: ScanSAR mode
                * D: Direct downlink mode
                * P: Polarimetry mode
                * C: Calibration mode

    PALSAR-2:
        Reference:
            ALOS-2/PALSAR-2 Level 1.1/1.5/2.1/3.1 CEOS SAR Product Format Description (JAXA 2014)
        Products / processing levels:
            * 1.0
            * 1.1
            * 1.5
        Acquisition modes:
            * SBS: Spotlight mode
            * UBS: Ultra-fine mode Single polarization
            * UBD: Ultra-fine mode Dual polarization
            * HBS: High-sensitive mode Single polarization
            * HBD: High-sensitive mode Dual polarization
            * HBQ: High-sensitive mode Full (Quad.) polarimetry
            * FBS: Fine mode Single polarization
            * FBD: Fine mode Dual polarization
            * FBQ: Fine mode Full (Quad.) polarimetry
            * WBS: Scan SAR nominal [14MHz] mode Single polarization
            * WBD: Scan SAR nominal [14MHz] mode Dual polarization
            * WWS: Scan SAR nominal [28MHz] mode Single polarization
            * WWD: Scan SAR nominal [28MHz] mode Dual polarization
            * VBS: Scan SAR wide mode Single polarization
            * VBD: Scan SAR wide mode Dual polarization
    """

    def __init__(self, scene):
        self.scene = os.path.realpath(scene)

        # try the PALSAR-1 and PALSAR-2 leader file naming patterns in turn;
        # only re-raise if none of the candidates matches
        candidates = [patterns.ceos_psr1, patterns.ceos_psr2]
        for i, pattern in enumerate(candidates):
            self.pattern = pattern
            try:
                self.examine()
                break
            except RuntimeError as e:
                if i + 1 == len(candidates):
                    raise e

        self.meta = self.scanMetadata()

        # register the standardized meta attributes as object attributes
        super(CEOS_PSR, self).__init__(self.meta)

    def _getLeaderfileContent(self):
        # read the raw bytes of the CEOS leader (LED) file
        led_obj = self.getFileObj(self.led_filename)
        led = led_obj.read()
        led_obj.close()
        return led

    def _img_get_coordinates(self):
        # derive corner coordinates from the first and last signal data records
        # of the image (IMG) file; values are stored as big-endian integers in
        # microdegrees
        img_filename = self.findfiles('IMG')[0]
        img_obj = self.getFileObj(img_filename)
        imageFileDescriptor = img_obj.read(720)

        lineRecordLength = int(imageFileDescriptor[186:192])  # bytes per line + 412
        numberOfRecords = int(imageFileDescriptor[180:186])

        signalDataDescriptor1 = img_obj.read(412)
        img_obj.seek(720 + lineRecordLength * (numberOfRecords - 1))
        signalDataDescriptor2 = img_obj.read()

        img_obj.close()

        lat = [signalDataDescriptor1[192:196], signalDataDescriptor1[200:204],
               signalDataDescriptor2[192:196], signalDataDescriptor2[200:204]]

        lon = [signalDataDescriptor1[204:208], signalDataDescriptor1[212:216],
               signalDataDescriptor2[204:208], signalDataDescriptor2[212:216]]

        lat = [struct.unpack('>i', x)[0] / 1000000. for x in lat]
        lon = [struct.unpack('>i', x)[0] / 1000000. for x in lon]

        return list(zip(lon, lat))

    def _parseSummary(self):
        # parse the summary/workreport text file into a dictionary;
        # returns an empty dict if no such file exists
        try:
            summary_file = self.getFileObj(self.findfiles('summary|workreport')[0])
        except IndexError:
            return {}
        text = summary_file.getvalue().decode('utf-8').strip()
        summary_file.close()
        # the file consists of 'key="value"' lines; rewrite it into a Python
        # dict literal and evaluate it safely
        summary = ast.literal_eval('{"' + re.sub(r'\s*=', '":', text).replace('\n', ',"') + '}')
        for x, y in summary.items():
            summary[x] = parse_literal(y)
        return summary

    @property
    def led_filename(self):
        # the name of the CEOS leader file
        return self.findfiles(self.pattern)[0]

    def scanMetadata(self):
        """
        read the leader file and the summary file and derive the standardized
        metadata attributes.

        Returns
        -------
        dict
            the derived attributes
        """
        ################################################################################################################
        # read leader (LED) file
        led = self._getLeaderfileContent()

        # read summary text file
        meta = self._parseSummary()

        # read polarizations from image file names
        meta['polarizations'] = [re.search('[HV]{2}', os.path.basename(x)).group(0) for x in self.findfiles('^IMG-')]
        ################################################################################################################
        # read start and stop time
        try:
            meta['start'] = self.parse_date(meta['Img_SceneStartDateTime'])
            meta['stop'] = self.parse_date(meta['Img_SceneEndDateTime'])
        except (AttributeError, KeyError):
            # fall back to searching the leader file content directly
            # NOTE(review): `led` is bytes; these str patterns would need to be
            # byte patterns for re.search to accept them -- confirm this
            # fallback path against real level-1.0 data
            try:
                start_string = re.search('Img_SceneStartDateTime[ ="0-9:.]*', led).group()
                stop_string = re.search('Img_SceneEndDateTime[ ="0-9:.]*', led).group()
                meta['start'] = self.parse_date(re.search(r'\d+\s[\d:.]+', start_string).group())
                meta['stop'] = self.parse_date(re.search(r'\d+\s[\d:.]+', stop_string).group())
            except AttributeError:
                raise IndexError('start and stop time stamps cannot be extracted; see file {}'
                                 .format(self.led_filename))
        ################################################################################################################
        # read file descriptor record; it lists number (n) and length (l) of
        # the records that follow
        p0 = 0
        p1 = struct.unpack('>i', led[8:12])[0]
        fileDescriptor = led[p0:p1]
        # dataSetSummary
        dss_n = int(fileDescriptor[180:186])
        dss_l = int(fileDescriptor[186:192])
        # mapProjectionData
        mpd_n = int(fileDescriptor[192:198])
        mpd_l = int(fileDescriptor[198:204])
        # platformPositionData
        ppd_n = int(fileDescriptor[204:210])
        ppd_l = int(fileDescriptor[210:216])
        # attitudeData
        adr_n = int(fileDescriptor[216:222])
        adr_l = int(fileDescriptor[222:228])
        # radiometricData
        rdr_n = int(fileDescriptor[228:234])
        rdr_l = int(fileDescriptor[234:240])
        # dataQualitySummary
        dqs_n = int(fileDescriptor[252:258])
        dqs_l = int(fileDescriptor[258:264])
        meta['sensor'] = {'AL1': 'PSR1', 'AL2': 'PSR2'}[fileDescriptor[48:51].decode('utf-8')]
        ################################################################################################################
        # read leader file name information
        match = re.match(re.compile(self.pattern), os.path.basename(self.led_filename))

        if meta['sensor'] == 'PSR1':
            meta['acquisition_mode'] = match.group('sub') + match.group('mode')
        else:
            meta['acquisition_mode'] = match.group('mode')
        meta['product'] = match.group('level')
        ################################################################################################################
        # read led records sequentially; p0/p1 track the running byte offsets
        p0 = p1
        p1 += dss_l * dss_n
        dataSetSummary = led[p0:p1]

        if mpd_n > 0:
            p0 = p1
            p1 += mpd_l * mpd_n
            mapProjectionData = led[p0:p1]
        else:
            mapProjectionData = None

        p0 = p1
        p1 += ppd_l * ppd_n
        platformPositionData = led[p0:p1]

        p0 = p1
        p1 += adr_l * adr_n
        attitudeData = led[p0:p1]

        p0 = p1
        p1 += rdr_l * rdr_n
        radiometricData = led[p0:p1]

        p0 = p1
        p1 += dqs_l * dqs_n
        dataQualitySummary = led[p0:p1]

        facilityRelatedData = []
        while p1 < len(led):
            p0 = p1
            length = struct.unpack('>i', led[(p0 + 8):(p0 + 12)])[0]
            p1 += length
            facilityRelatedData.append(led[p0:p1])
        ################################################################################################################
        # read map projection data record
        if mapProjectionData is not None:
            lat = list(map(float, [mapProjectionData[1072:1088],
                                   mapProjectionData[1104:1120],
                                   mapProjectionData[1136:1152],
                                   mapProjectionData[1168:1184]]))

            lon = list(map(float, [mapProjectionData[1088:1104],
                                   mapProjectionData[1120:1136],
                                   mapProjectionData[1152:1168],
                                   mapProjectionData[1184:1200]]))

            meta['coordinates'] = list(zip(lon, lat))

            # https://github.com/datalyze-solutions/LandsatProcessingPlugin/blob/master/src/metageta/formats/alos.py
            src_srs = osr.SpatialReference()
            # src_srs.SetGeogCS('GRS 1980','GRS 1980','GRS 1980',6378137.00000,298.2572220972)
            src_srs.SetWellKnownGeogCS('WGS84')
            # Proj CS
            # bug fix: decode the bytes to str, otherwise the comparisons with
            # the projection name literals below can never be True
            projdesc = mapProjectionData[412:444].strip().decode()
            epsg = 0  # default
            if projdesc == 'UTM-PROJECTION':
                nZone = int(mapProjectionData[476:480])
                dfFalseNorthing = float(mapProjectionData[496:512])
                if dfFalseNorthing > 0.0:
                    bNorth = False
                    epsg = 32700 + nZone
                else:
                    bNorth = True
                    epsg = 32600 + nZone
                src_srs.ImportFromEPSG(epsg)
                # src_srs.SetUTM(nZone,bNorth) #generates WKT that osr.SpatialReference.AutoIdentifyEPSG() doesn't return an EPSG for
            elif projdesc == 'UPS-PROJECTION':
                # bug fix: these byte ranges were previously written as tuple
                # indices (e.g. mapProjectionData[624, 640]), which raises
                # TypeError on bytes; they must be slices
                dfCenterLon = float(mapProjectionData[624:640])
                dfCenterLat = float(mapProjectionData[640:656])
                dfScale = float(mapProjectionData[656:672])
                src_srs.SetPS(dfCenterLat, dfCenterLon, dfScale, 0.0, 0.0)
            elif projdesc == 'MER-PROJECTION':
                dfCenterLon = float(mapProjectionData[736:752])
                dfCenterLat = float(mapProjectionData[752:768])
                src_srs.SetMercator(dfCenterLat, dfCenterLon, 0, 0, 0)
            elif projdesc == 'LCC-PROJECTION':
                dfCenterLon = float(mapProjectionData[736:752])
                dfCenterLat = float(mapProjectionData[752:768])
                dfStdP1 = float(mapProjectionData[768:784])
                dfStdP2 = float(mapProjectionData[784:800])
                src_srs.SetLCC(dfStdP1, dfStdP2, dfCenterLat, dfCenterLon, 0, 0)
            meta['projection'] = src_srs.ExportToWkt()
        else:
            # no map projection record (e.g. level 1.0/1.1 products):
            # fall back to the corner coordinates in the image file
            coordinates = self._img_get_coordinates()
            if all([x == (0, 0) for x in coordinates]):
                meta['projection'] = None
            else:
                meta['coordinates'] = coordinates
                meta['projection'] = crsConvert(4326, 'wkt')
        ################################################################################################################
        # read data set summary record
        if meta['product'] == '1.5':
            meta["heading_scene"] = float(dataSetSummary[148:164])
            meta["heading"] = float(dataSetSummary[468:476])
        else:
            meta["heading_scene"] = None
            meta["heading"] = None

        scene_id = dataSetSummary[20:52].decode('ascii')

        # note: the group names other than orbitNumber/frameNumber are not
        # consumed anywhere; they only document the scene ID layout
        if meta['sensor'] == 'PSR1':
            pattern = r'(?P<sat_id>[A-Z]{2})' \
                      r'(?P<sensor_id>[A-Z]{3})' \
                      r'(?P<sensor_id_sub>[A-Z]{1})' \
                      r'(?P<orbitNumber>[0-9]{5})' \
                      r'(?P<frameNumber>[0-9]{4})'
        elif meta['sensor'] == 'PSR2':
            pattern = r'(?P<sat_id>[A-Z0-9]{5})' \
                      r'(?P<orbitNumber>[0-9]{5})' \
                      r'(?P<frameNumber>[0-9]{4})-' \
                      r'(?P<obs_date>[0-9]{6})[ ]{11}'
        else:
            raise ValueError('sensor must be either PSR1 or PSR2; is: {}'.format(meta['sensor']))

        match = re.match(re.compile(pattern), scene_id)

        orbitsPerCycle = {'PSR1': 671, 'PSR2': 207}[meta['sensor']]

        meta['orbitNumber_abs'] = int(match.group('orbitNumber'))
        meta['orbitNumber_rel'] = meta['orbitNumber_abs'] % orbitsPerCycle
        meta['cycleNumber'] = meta['orbitNumber_abs'] // orbitsPerCycle + 1
        meta['frameNumber'] = int(match.group('frameNumber'))

        # lines/samples: the LED fields may be blank; fall back to the summary
        # file attributes if available
        try:
            meta['lines'] = int(dataSetSummary[324:332]) * 2
        except ValueError:
            if 'Pdi_NoOfLines' in meta.keys():
                meta['lines'] = meta['Pdi_NoOfLines']
            else:
                meta['lines'] = None
        try:
            meta['samples'] = int(dataSetSummary[332:340]) * 2
        except ValueError:
            if 'Pdi_NoOfPixels' in meta.keys():
                meta['samples'] = meta['Pdi_NoOfPixels']
            else:
                meta['samples'] = None

        meta['incidence'] = float(dataSetSummary[484:492])
        meta['wavelength'] = float(dataSetSummary[500:516]) * 100  # in cm
        # NOTE(review): these remain bytes (not decoded), unlike CEOS_ERS --
        # left unchanged to not alter the meta value types for consumers
        meta['proc_facility'] = dataSetSummary[1046:1062].strip()
        meta['proc_system'] = dataSetSummary[1062:1070].strip()
        meta['proc_version'] = dataSetSummary[1070:1078].strip()

        try:
            azlks = float(dataSetSummary[1174:1190])
            rlks = float(dataSetSummary[1190:1206])
            meta['looks'] = (rlks, azlks)
        except ValueError:
            meta['looks'] = (None, None)

        meta['orbit'] = dataSetSummary[1534:1542].decode('utf-8').strip()[0]

        try:
            spacing_azimuth = float(dataSetSummary[1686:1702])
            spacing_range = float(dataSetSummary[1702:1718])
            meta['spacing'] = (spacing_range, spacing_azimuth)
        except ValueError:
            meta['spacing'] = (None, None)
        ################################################################################################################
        # read radiometric data record
        if len(radiometricData) > 0:
            meta['k_dB'] = float(radiometricData[20:36])
        else:
            meta['k_dB'] = None
        ################################################################################################################
        # additional notes

        # the following can be used to read platform position time from the led file
        # this covers a larger time frame than the actual scene sensing time
        # y, m, d, nd, s = platformPositionData[144:182].split()
        # start = datetime(int(y), int(m), int(d)) + timedelta(seconds=float(s))
        # npoints = int(platformPositionData[140:144])
        # interval = float(platformPositionData[182:204])
        # stop = start + timedelta(seconds=(npoints - 1) * interval)
        # parse_date(start)
        # parse_date(stop)

        return meta

    def unpack(self, directory, overwrite=False, exist_ok=False):
        # the output directory is named after the leader file without its
        # 'LED-' prefix
        outdir = os.path.join(directory, os.path.basename(self.file).replace('LED-', ''))
        self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)
class EORC_PSR(ID):
"""
Handler class for ALOS-2/PALSAR-2 data in EORC (Earth Observation Research Center) Path format
Sensors:
* PALSAR-2
PALSAR-2:
Reference:
NDX-150019: ALOS-2/PALSAR-2 EORC Path Product Format Description (JAXA 2016)
Products / processing levels:
* 1.5
Acquisition modes:
* FBD: Fine mode Dual polarization
* WBD: Scan SAR nominal [14MHz] mode Dual polarization
"""
def __init__(self, scene):
self.scene = os.path.realpath(scene)
self.pattern = patterns.eorc_psr
self.examine()
self.meta = self.scanMetadata()
# register the standardized meta attributes as object attributes
super(EORC_PSR, self).__init__(self.meta)
def _getHeaderfileContent(self):
head_obj = self.getFileObj(self.header_filename)
head = head_obj.read().decode('utf-8')
head = list(head.split('\n'))
head_obj.close()
return head
def _img_get_coordinates(self):
img_filename = self.findfiles('IMG')[0]
img_obj = self.getFileObj(img_filename)
imageFileDescriptor = img_obj.read(720)
lineRecordLength = int(imageFileDescriptor[186:192]) # bytes per line + 412
numberOfRecords = int(imageFileDescriptor[180:186])
signalDataDescriptor1 = img_obj.read(412)
img_obj.seek(720 + lineRecordLength * (numberOfRecords - 1))
signalDataDescriptor2 = img_obj.read()
img_obj.close()
lat = [signalDataDescriptor1[192:196], signalDataDescriptor1[200:204],
signalDataDescriptor2[192:196], signalDataDescriptor2[200:204]]
lon = [signalDataDescriptor1[204:208], signalDataDescriptor1[212:216],
signalDataDescriptor2[204:208], signalDataDescriptor2[212:216]]
lat = [struct.unpack('>i', x)[0] / 1000000. for x in lat]
lon = [struct.unpack('>i', x)[0] / 1000000. for x in lon]
return list(zip(lon, lat))
def _parseFacter_m(self):
try:
facter_file = self.findfiles('facter_m.dat')[0]
except IndexError:
return {}
facter_obj = self.getFileObj(facter_file)
facter_m = facter_obj.read().decode('utf-8')
facter_m = list(facter_m.split('\n'))
facter_obj.close()
return facter_m
@property
def header_filename(self):
return self.findfiles(self.pattern)[0]
def scanMetadata(self):
################################################################################################################
# read header (HDR) file
header = self._getHeaderfileContent()
header = [head.replace(" ", "") for head in header]
# read summary text file
facter_m = self._parseFacter_m()
facter_m = [fact.replace(" ", "") for fact in facter_m]
meta = {}
# read polarizations from image file names
meta['polarizations'] = [re.search('[HV]{2}', os.path.basename(x)).group(0) for x in self.findfiles('^sar.')]
meta['product'] = header[3]
################################################################################################################
# read start and stop time --> TODO: in what format is the start and stop time?
try:
start_time = facter_m[168].split('.')[0].zfill(2) + facter_m[168].split('.')[1][:4]
stop_time = facter_m[170].split('.')[0].zfill(2) + facter_m[170].split('.')[1][:4]
except (AttributeError):
raise IndexError('start and stop time stamps cannot be extracted; see file facter_m.dat')
meta['start'] = str(header[6]) # +'T'+start_time
meta['stop'] = str(header[6]) # +'T'+stop_time
################################################################################################################
# read file metadata
meta['sensor'] = header[2]
################################################################################################################
# read leader file name information
meta['acquisition_mode'] = header[12]
# ##############################################################################################################
# read map projection data
lat = list(map(float, [header[33], header[35], header[37], header[39]]))
lon = list(map(float, [header[34], header[36], header[38], header[40]]))
if len(lat) == 0 or len(lon) == 0:
meta['coordinates'] = self._img_get_coordinates()
else:
meta['coordinates'] = list(zip(lon, lat))
meta['projection'] = crsConvert(4918, 'wkt') # EPSG: 4918: ITRF97, GRS80
################################################################################################################
# read data set summary record
orbitsPerCycle = int(207)
meta['orbitNumber_rel'] = int(header[7])
meta['cycleNumber'] = header[5]
meta['frameNumber'] = ''
meta['orbitNumber_abs'] = int(orbitsPerCycle * (meta['cycleNumber'] - 1) + meta['orbitNumber_rel'])
meta['lines'] = int(float(facter_m[51]))
meta['samples'] = int(float(facter_m[50]))
meta['incidence'] = float(facter_m[119])
meta['proc_facility'] = header[73]
meta['spacing'] = (float(header[51]), float(header[52]))
meta['orbit'] = header[9]
################################################################################################################
# read radiometric data record
meta['k_dB'] = float(header[64])
return meta
def unpack(self, directory, overwrite=False, exist_ok=False):
    """
    Unpack the scene into a subdirectory of `directory` named after the
    scene file with a leading 'LED-' prefix removed.
    """
    scene_base = os.path.basename(self.file).replace('LED-', '')
    self._unpack(os.path.join(directory, scene_base),
                 overwrite=overwrite, exist_ok=exist_ok)
class ESA(ID):
    """
    Handler class for SAR data in ESA format (Envisat ASAR, ERS-1/2)
    
    Sensors:
        * ASAR
        * ERS1
        * ERS2
    """
    
    def __init__(self, scene):
        self.pattern = patterns.esa
        # pattern for the product ID part of the file name;
        # group 'image_mode' is evaluated in scanMetadata
        self.pattern_pid = r'(?P<sat_id>(?:SAR|ASA))_' \
                           r'(?P<image_mode>(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_)))_' \
                           r'(?P<processing_level>[012B][CP])'
        
        self.scene = os.path.realpath(scene)
        
        if re.search('.[EN][12]$', self.scene):
            # the scene is a single .E1/.E2/.N1 product file
            self.file = self.scene
        else:
            self.examine()
        
        self.meta = self.scanMetadata()
        
        # register the standardized meta attributes as object attributes
        super(ESA, self).__init__(self.meta)
    
    def scanMetadata(self):
        """
        Read the ASCII product headers (MPH, SPH, DSDs) and the binary
        GEOLOCATION GRID ADS records and return a standardized metadata
        dictionary.
        
        Returns
        -------
        dict
            the scene metadata
        """
        match = re.match(re.compile(self.pattern), os.path.basename(self.file))
        match2 = re.match(re.compile(self.pattern_pid), match.group('product_id'))
        
        if re.search('IM__0', match.group('product_id')):
            raise RuntimeError('product level 0 not supported (yet)')
        
        meta = dict()
        sensor_lookup = {'N1': 'ASAR', 'E1': 'ERS1', 'E2': 'ERS2'}
        meta['sensor'] = sensor_lookup[match.group('satellite_ID')]
        meta['acquisition_mode'] = match2.group('image_mode')
        meta['image_geometry'] = 'GROUND_RANGE'
        # product overview table: https://doi.org/10.5167/UZH-96146
        if meta['acquisition_mode'] in ['APS', 'IMS', 'WSS']:
            meta['product'] = 'SLC'
            meta['image_geometry'] = 'SLANT_RANGE'
        elif meta['acquisition_mode'] in ['APP', 'IMP']:
            meta['product'] = 'PRI'
        elif meta['acquisition_mode'] in ['APM', 'IMM', 'WSM']:
            meta['product'] = 'MR'
        else:
            raise RuntimeError(f"unsupported acquisition mode: '{meta['acquisition_mode']}'")
        
        def val_convert(val):
            # convert a header string to int, float or (UTC) datetime where possible
            try:
                out = int(val)
            except ValueError:
                try:
                    out = float(val)
                except ValueError:
                    if re.search('[0-9]{2}-[A-Z]{3}-[0-9]{2}', val):
                        out = dateparse(val)
                        out = out.replace(tzinfo=timezone.utc)
                    else:
                        out = val
            return out
        
        def decode(raw):
            # parse the KEY=VALUE lines of an ASCII header block into a dictionary
            pattern = r'(?P<key>[A-Z0-9_]+)\=(")?(?P<value>.*?)("|<|$)'
            out = {}
            # tie point coordinates are stored as integers scaled by 1e6
            coord_keys = [f'{x}_{y}_{z}'
                          for x in ['FIRST', 'LAST']
                          for y in ['NEAR', 'MID', 'FAR']
                          for z in ['LAT', 'LONG']]
            lines = raw.split('\n')
            for line in lines:
                match = re.match(pattern, line)
                if match:
                    matchdict = match.groupdict()
                    val = val_convert(str(matchdict['value']).strip())
                    if matchdict['key'] in coord_keys:
                        val *= 10 ** -6
                    out[matchdict['key']] = val
            return out
        
        with self.getFileObj(self.file) as obj:
            origin = {}
            # the Main Product Header (MPH) has a fixed size of 1247 bytes
            mph = obj.read(1247).decode('ascii')
            origin['MPH'] = decode(mph)
            
            sph_size = origin['MPH']['SPH_SIZE']
            dsd_size = origin['MPH']['DSD_SIZE']
            dsd_num = origin['MPH']['NUM_DSD']
            # the SPH description precedes the Data Set Descriptors (DSDs)
            sph_descr_size = sph_size - dsd_size * dsd_num
            
            sph = obj.read(sph_descr_size).decode('ascii')
            origin['SPH'] = decode(sph)
            
            datasets = {}
            for i in range(dsd_num):
                dsd = obj.read(dsd_size).decode('ascii')
                dataset = decode(dsd)
                datasets[dataset.pop('DS_NAME')] = dataset
            origin['DSD'] = datasets
            meta['origin'] = origin
            
            # read the binary GEOLOCATION GRID ADS and split it into
            # fixed-size data set records (granules)
            key = 'GEOLOCATION GRID ADS'
            ds_offset = origin['DSD'][key]['DS_OFFSET']
            ds_size = origin['DSD'][key]['DS_SIZE']
            dsr_size = origin['DSD'][key]['DSR_SIZE']
            obj.seek(ds_offset)
            geo = obj.read(ds_size)
            geo = [geo[i:i + dsr_size] for i in range(0, len(geo), dsr_size)]
            
            keys = ['first_zero_doppler_time', 'attach_flag', 'line_num',
                    'num_lines', 'sub_sat_track', 'first_line_tie_points',
                    'spare', 'last_zero_doppler_time', 'last_line_tie_points',
                    'swath_number']
            # record field sizes in bytes; the final entry (19) is a trailing
            # spare field without a corresponding key and is never read
            lengths = [12, 1, 4, 4, 4, 220, 22, 12, 220, 3, 19]
            
            meta['origin']['GEOLOCATION_GRID_ADS'] = []
            for granule in geo:
                start = 0
                values = {}
                for i, key in enumerate(keys):
                    value = granule[start:sum(lengths[:i + 1])]
                    if key in ['first_zero_doppler_time', 'last_zero_doppler_time']:
                        # time stamps are days/seconds/microseconds since 2000-01-01 (UTC)
                        components = dict(zip(('days', 'seconds', 'microseconds'),
                                              struct.unpack('>lLL', value)))
                        value = datetime(year=2000, month=1, day=1, tzinfo=timezone.utc)
                        value += timedelta(**components)
                    elif key in ['attach_flag']:
                        value = struct.unpack('B', value)[0]
                    elif key in ['line_num', 'num_lines']:
                        value = struct.unpack('>L', value)[0]
                    elif key in ['sub_sat_track']:
                        value = struct.unpack('>f', value)[0]
                    elif key in ['first_line_tie_points', 'last_line_tie_points']:
                        # 11 tie points per line; lat/lon are 1e-6 degree integers
                        sample_numbers = struct.unpack('>' + 'L' * 11, value[0:44])
                        slant_range_times = struct.unpack('>' + 'f' * 11, value[44:88])
                        incident_angles = struct.unpack('>' + 'f' * 11, value[88:132])
                        latitudes = struct.unpack('>' + 'l' * 11, value[132:176])
                        latitudes = [x / 1000000. for x in latitudes]
                        longitudes = struct.unpack('>' + 'l' * 11, value[176:220])
                        longitudes = [x / 1000000. for x in longitudes]
                        value = []
                        for j in range(11):
                            value.append({'sample_number': sample_numbers[j],
                                          'slant_range_time': slant_range_times[j],
                                          'incident_angle': incident_angles[j],
                                          'latitude': latitudes[j],
                                          'longitude': longitudes[j]})
                    elif key == 'swath_number':
                        value = value.decode('ascii').strip()
                    if key != 'spare':
                        values[key] = value
                    start += lengths[i]
                meta['origin']['GEOLOCATION_GRID_ADS'].append(values)
        
        # collect all tie point coordinates as the scene footprint
        lat = []
        lon = []
        for granule in meta['origin']['GEOLOCATION_GRID_ADS']:
            for group in ['first', 'last']:
                for i in range(11):
                    lat.append(granule[f'{group}_line_tie_points'][i]['latitude'])
                    lon.append(granule[f'{group}_line_tie_points'][i]['longitude'])
        meta['coordinates'] = list(zip(lon, lat))
        
        if meta['sensor'] == 'ASAR':
            # e.g. 'H/V' -> 'HV'; entries of length != 3 are placeholders
            pols = [y for x, y in origin['SPH'].items() if 'TX_RX_POLAR' in x]
            pols = [x.replace('/', '') for x in pols if len(x) == 3]
            meta['polarizations'] = sorted(pols)
        elif meta['sensor'] in ['ERS1', 'ERS2']:
            meta['polarizations'] = ['VV']
        
        meta['orbit'] = origin['SPH']['PASS'][0]
        meta['start'] = origin['MPH']['SENSING_START'].strftime('%Y%m%dT%H%M%S')
        meta['stop'] = origin['MPH']['SENSING_STOP'].strftime('%Y%m%dT%H%M%S')
        meta['spacing'] = (origin['SPH']['RANGE_SPACING'], origin['SPH']['AZIMUTH_SPACING'])
        meta['looks'] = (origin['SPH']['RANGE_LOOKS'], origin['SPH']['AZIMUTH_LOOKS'])
        meta['samples'] = origin['SPH']['LINE_LENGTH']
        meta['lines'] = origin['DSD']['MDS1']['NUM_DSR']
        
        meta['orbitNumber_abs'] = origin['MPH']['ABS_ORBIT']
        meta['orbitNumber_rel'] = origin['MPH']['REL_ORBIT']
        meta['cycleNumber'] = origin['MPH']['CYCLE']
        meta['frameNumber'] = origin['MPH']['ABS_ORBIT']
        
        # derive near/far/center incidence angles from the tie points
        incident_angles = []
        for item in meta['origin']['GEOLOCATION_GRID_ADS']:
            for key in ['first', 'last']:
                pts = item[f'{key}_line_tie_points']
                for pt in pts:
                    incident_angles.append(pt['incident_angle'])
        meta['incidence_nr'] = min(incident_angles)
        meta['incidence_fr'] = max(incident_angles)
        meta['incidence'] = (meta['incidence_nr'] + meta['incidence_fr']) / 2
        
        resolution_rg, resolution_az, nesz_nr, nesz_fr = \
            get_resolution_nesz(sensor=meta['sensor'], mode=meta['acquisition_mode'],
                                swath_id=origin['SPH']['SWATH'], date=meta['start'])
        meta['resolution'] = (resolution_rg, resolution_az)
        meta['nesz'] = (nesz_nr, nesz_fr)
        
        meta['projection'] = crsConvert(4326, 'wkt')
        return meta
    
    def geo_grid(self, outname=None, driver=None, overwrite=True):
        """
        get the geo grid as vector geometry
        
        Parameters
        ----------
        outname: str
            the name of the vector file to be written
        driver: str
            the output file format; needs to be defined if the format cannot
            be auto-detected from the filename extension
        overwrite: bool
            overwrite an existing vector file?
        
        Returns
        -------
        spatialist.vector.Vector or None
            the vector object if `outname` is None, None otherwise
        
        See also
        --------
        spatialist.vector.Vector.write
        """
        vec = Vector(driver='MEM')
        vec.addlayer('geogrid', 4326, ogr.wkbPoint)
        field_defs = [
            ("swath", ogr.OFTString),
            ("azimuthTime", ogr.OFTDateTime),
            ("slantRangeTime", ogr.OFTReal),
            ("line", ogr.OFTInteger),
            ("pixel", ogr.OFTInteger),
            ("incidenceAngle", ogr.OFTReal)
        ]
        for name, ftype in field_defs:
            field = ogr.FieldDefn(name, ftype)
            vec.layer.CreateField(field)
        
        for granule in self.meta['origin']['GEOLOCATION_GRID_ADS']:
            line_first = granule['line_num']
            line_last = granule['line_num'] + granule['num_lines'] - 1
            # each granule carries tie points for its first and last image line
            for group in ['first', 'last']:
                meta = {'swath': granule['swath_number'],
                        'azimuthTime': granule[f'{group}_zero_doppler_time'],
                        'line': line_first if group == 'first' else line_last}
                tp = granule[f'{group}_line_tie_points']
                for i in range(11):
                    x = tp[i]['longitude']
                    y = tp[i]['latitude']
                    geom = ogr.Geometry(ogr.wkbPoint)
                    geom.AddPoint(x, y)
                    geom.FlattenTo2D()
                    meta['slantRangeTime'] = tp[i]['slant_range_time']
                    meta['pixel'] = tp[i]['sample_number']
                    meta['incidenceAngle'] = tp[i]['incident_angle']
                    vec.addfeature(geom, fields=meta)
        geom = None
        if outname is None:
            return vec
        else:
            vec.write(outfile=outname, driver=driver, overwrite=overwrite)
    
    def unpack(self, directory, overwrite=False, exist_ok=False):
        """
        Unpack the scene into a directory named after the scene file
        (minus any archive extension).
        """
        # remove a trailing archive extension; note that str.strip would treat
        # its argument as a set of characters (not a regex) and could truncate
        # valid scene names, hence re.sub with an anchored pattern
        base_file = re.sub(r'\.(?:zip|tar(?:\.gz)?)$', '', os.path.basename(self.file))
        base_dir = os.path.basename(directory.strip('/'))
        outdir = directory if base_file == base_dir else os.path.join(directory, base_file)
        self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)
class SAFE(ID):
    """
    Handler class for Sentinel-1 data
    
    Sensors:
        * S1A
        * S1B
        * S1C
        * S1D
    
    References:
        * S1-RS-MDA-52-7443 Sentinel-1 IPF Auxiliary Product Specification
        * MPC-0243 Masking "No-value" Pixels on GRD Products generated by the Sentinel-1 ESA IPF
    """
    
    def __init__(self, scene):
        self.scene = os.path.realpath(scene)
        
        self.pattern = patterns.safe
        
        # pattern for annotation/measurement data set file names
        self.pattern_ds = r'^s1[abcd]-' \
                          r'(?P<swath>s[1-6]|iw[1-3]?|ew[1-5]?|wv[1-2]|n[1-6])-' \
                          r'(?P<product>slc|grd|ocn)-' \
                          r'(?P<pol>hh|hv|vv|vh)-' \
                          r'(?P<start>[0-9]{8}t[0-9]{6})-' \
                          r'(?P<stop>[0-9]{8}t[0-9]{6})-' \
                          r'(?:[0-9]{6})-(?:[0-9a-f]{6})-' \
                          r'(?P<id>[0-9]{3})' \
                          r'\.xml$'
        
        self.examine(include_folders=True)
        
        if not re.match(re.compile(self.pattern), os.path.basename(self.file)):
            raise RuntimeError('folder does not match S1 scene naming convention')
        
        # scan the metadata XML file and add selected attributes to a meta dictionary
        self.meta = self.scanMetadata()
        self.meta['projection'] = crsConvert(4326, 'wkt')
        
        # register the standardized meta attributes as object attributes
        super(SAFE, self).__init__(self.meta)
        
        self.gammafiles = {'slc': [], 'pri': [], 'grd': []}
    
    def _get_annotations(self):
        """
        Find the annotation XML files and restrict them to the files of the
        first polarization group (one annotation file per sub-swath).
        """
        annotations = self.findfiles(self.pattern_ds)
        
        def key(x):
            return re.search('-[vh]{2}-', x).group()
        
        groups = groupby(sorted(annotations, key=key), key=key)
        return [list(value) for key, value in groups][0]
    
    def removeGRDBorderNoise(self, method='pyroSAR'):
        """
        mask out Sentinel-1 image border noise.
        
        Parameters
        ----------
        method: str
            the border noise removal method to be applied; one of the following:
            
            - 'ESA': the pure implementation as described by ESA
            - 'pyroSAR': the ESA method plus the custom pyroSAR refinement
        
        Returns
        -------
        
        See Also
        --------
        :func:`~pyroSAR.S1.removeGRDBorderNoise`
        """
        S1.removeGRDBorderNoise(self, method=method)
    
    def geo_grid(self, outname=None, driver=None, overwrite=True):
        """
        get the geo grid as vector geometry
        
        Parameters
        ----------
        outname: str
            the name of the vector file to be written
        driver: str
            the output file format; needs to be defined if the format cannot
            be auto-detected from the filename extension
        overwrite: bool
            overwrite an existing vector file?
        
        Returns
        -------
        ~spatialist.vector.Vector or None
            the vector object if `outname` is None, None otherwise
        
        See also
        --------
        spatialist.vector.Vector.write
        """
        annotations = self._get_annotations()
        
        vec = Vector(driver='MEM')
        vec.addlayer('geogrid', 4326, ogr.wkbPoint25D)
        field_defs = [
            ("swath", ogr.OFTString),
            ("azimuthTime", ogr.OFTDateTime),
            ("slantRangeTime", ogr.OFTReal),
            ("line", ogr.OFTInteger),
            ("pixel", ogr.OFTInteger),
            ("incidenceAngle", ogr.OFTReal),
            ("elevationAngle", ogr.OFTReal),
        ]
        for name, ftype in field_defs:
            field = ogr.FieldDefn(name, ftype)
            vec.layer.CreateField(field)
        
        for ann in annotations:
            with self.getFileObj(ann) as ann_xml:
                tree = ET.fromstring(ann_xml.read())
            swath = tree.find(".//adsHeader/swath").text
            points = tree.findall(".//geolocationGridPoint")
            for point in points:
                meta = {child.tag: child.text for child in point}
                meta["swath"] = swath
                x = float(meta.pop("longitude"))
                y = float(meta.pop("latitude"))
                z = float(meta.pop("height"))
                geom = ogr.Geometry(ogr.wkbPoint25D)
                geom.AddPoint(x, y, z)
                # annotation times have no zone info; they are treated as UTC
                az_time = dateparse(meta["azimuthTime"])
                meta["azimuthTime"] = az_time.replace(tzinfo=timezone.utc)
                for key in ["slantRangeTime", "incidenceAngle", "elevationAngle"]:
                    meta[key] = float(meta[key])
                for key in ["line", "pixel"]:
                    meta[key] = int(meta[key])
                vec.addfeature(geom, fields=meta)
        geom = None
        if outname is None:
            return vec
        else:
            vec.write(outfile=outname, driver=driver, overwrite=overwrite)
    
    def getOSV(self, osvdir=None, osvType='POE', returnMatch=False, useLocal=True, timeout=300, url_option=1):
        """
        download Orbit State Vector files for the scene
        
        Parameters
        ----------
        osvdir: str
            the directory of OSV files; subdirectories POEORB and RESORB are created automatically;
            if no directory is defined, the standard SNAP auxdata location is used
        osvType: str or list[str]
            the type of orbit file either 'POE', 'RES' or a list of both;
            if both are selected, the best matching file will be retrieved. I.e., POE if available and RES otherwise
        returnMatch: bool
            return the best matching orbit file?
        useLocal: bool
            use locally existing files and do not search for files online if the right file has been found?
        timeout: int or tuple or None
            the timeout in seconds for downloading OSV files as provided to :func:`requests.get`
        url_option: int
            the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch` for options
        
        Returns
        -------
        str or None
            the best matching OSV file if `returnMatch` is True or None otherwise
        
        See Also
        --------
        :class:`pyroSAR.S1.OSV`
        """
        with S1.OSV(osvdir, timeout=timeout) as osv:
            if useLocal:
                match = osv.match(sensor=self.sensor, timestamp=self.start,
                                  osvtype=osvType)
                if match is not None:
                    return match if returnMatch else None
            
            if osvType in ['POE', 'RES']:
                files = osv.catch(sensor=self.sensor, osvtype=osvType,
                                  start=self.start, stop=self.stop,
                                  url_option=url_option)
            elif sorted(osvType) == ['POE', 'RES']:
                # prefer the precise orbit file; fall back to the restituted one
                files = osv.catch(sensor=self.sensor, osvtype='POE',
                                  start=self.start, stop=self.stop,
                                  url_option=url_option)
                if len(files) == 0:
                    files = osv.catch(sensor=self.sensor, osvtype='RES',
                                      start=self.start, stop=self.stop,
                                      url_option=url_option)
            else:
                msg = "osvType must either be 'POE', 'RES' or a list of both"
                raise TypeError(msg)
            
            osv.retrieve(files)
            
            if returnMatch:
                match = osv.match(sensor=self.sensor, timestamp=self.start,
                                  osvtype=osvType)
                return match
    
    def quicklook(self, outname, format='kmz', na_transparent=True):
        """
        Write a quicklook file for the scene.
        
        Parameters
        ----------
        outname: str
            the file to write
        format: str
            the quicklook format. Currently supported options:
            
            - kmz
        na_transparent: bool
            make NA values transparent?
        
        Returns
        -------
        """
        if self.product not in ['GRD', 'SLC']:
            msg = 'this method has only been implemented for GRD and SLC, not {}'
            raise RuntimeError(msg.format(self.product))
        if format != 'kmz':
            raise RuntimeError('currently only kmz is supported as format')
        kml_name = self.findfiles('map-overlay.kml')[0]
        png_name = self.findfiles('quick-look.png')[0]
        with zf.ZipFile(outname, 'w') as out:
            with self.getFileObj(kml_name) as kml_in:
                kml = kml_in.getvalue().decode('utf-8')
                kml = kml.replace('Sentinel-1 Map Overlay', self.outname_base())
                out.writestr('doc.kml', data=kml)
            with self.getFileObj(png_name) as png_in:
                if na_transparent:
                    # set all pure black pixels fully transparent
                    img = Image.open(png_in)
                    img = img.convert('RGBA')
                    datas = img.getdata()
                    newData = []
                    for item in datas:
                        if item[0] == 0 and item[1] == 0 and item[2] == 0:
                            newData.append((0, 0, 0, 0))
                        else:
                            newData.append(item)
                    img.putdata(newData)
                    buf = BytesIO()
                    img.save(buf, format='png')
                    out.writestr('quick-look.png', buf.getvalue())
                else:
                    out.writestr('quick-look.png', data=png_in.getvalue())
    
    def resolution(self):
        """
        Compute the mid-swath resolution of the Sentinel-1 product. For GRD products the resolution is expressed in
        ground range and in slant range otherwise.
        
        References:
            * https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/resolutions/level-1-single-look-complex
            * https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/resolutions/level-1-ground-range-detected
            * https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/document-library/-/asset_publisher/1dO7RF5fJMbd/content/sentinel-1-product-definition
        
        Returns
        -------
        tuple[float]
            the resolution as (range, azimuth)
        """
        if 'resolution' in self.meta.keys():
            return self.meta['resolution']
        if self.product not in ['GRD', 'SLC']:
            msg = 'this method has only been implemented for GRD and SLC, not {}'
            raise RuntimeError(msg.format(self.product))
        
        annotations = self._get_annotations()
        
        proc_pars = []  # processing parameters per sub-swath
        sp_az = []  # azimuth pixel spacings per sub-swath
        ti_az = []  # azimuth time intervals per sub-swath
        for ann in annotations:
            with self.getFileObj(ann) as ann_xml:
                tree = ET.fromstring(ann_xml.read())
            par = tree.findall('.//swathProcParams')
            proc_pars.extend(par)
            for i in range(len(par)):
                sp_az.append(float(tree.find('.//azimuthPixelSpacing').text))
                ti_az.append(float(tree.find('.//azimuthTimeInterval').text))
        
        c = 299792458.0  # speed of light
        # see Sentinel-1 product definition for Hamming window coefficients
        # and Impulse Response Width (IRW) broadening factors:
        coefficients = [0.52, 0.6, 0.61, 0.62, 0.63, 0.65, 0.70, 0.72, 0.73, 0.75]
        b_factors = [1.54, 1.32, 1.3, 1.28, 1.27, 1.24, 1.18, 1.16, 1.15, 1.13]
        
        resolutions_rg = []
        resolutions_az = []
        for i, par in enumerate(proc_pars):
            # computation of slant range resolution
            rg_proc = par.find('rangeProcessing')
            wrg = float(rg_proc.find('windowCoefficient').text)
            brg = float(rg_proc.find('processingBandwidth').text)
            lbrg = float(rg_proc.find('lookBandwidth').text)
            lrg = brg / lbrg
            kbrg = b_factors[coefficients.index(wrg)]
            resolutions_rg.append(0.886 * c / (2 * brg) * kbrg * lrg)
            
            # computation of azimuth resolution; yet to be checked for correctness
            az_proc = par.find('azimuthProcessing')
            waz = float(az_proc.find('windowCoefficient').text)
            baz = float(az_proc.find('processingBandwidth').text)
            lbaz = float(az_proc.find('lookBandwidth').text)
            laz = baz / lbaz
            kbaz = b_factors[coefficients.index(waz)]
            vsat = sp_az[i] / ti_az[i]
            resolutions_az.append(0.886 * vsat / baz * kbaz * laz)
        
        resolution_rg = median(resolutions_rg)
        resolution_az = median(resolutions_az)
        
        if self.meta['image_geometry'] == 'GROUND_RANGE':
            resolution_rg /= math.sin(math.radians(self.meta['incidence']))
        
        self.meta['resolution'] = resolution_rg, resolution_az
        return self.meta['resolution']
    
    def scanMetadata(self):
        """
        Read the manifest.safe file and the annotation XMLs and return a
        standardized metadata dictionary.
        """
        with self.getFileObj(self.findfiles('manifest.safe')[0]) as mani:
            manifest = mani.getvalue()
        namespaces = getNamespaces(manifest)
        tree = ET.fromstring(manifest)
        
        meta = dict()
        
        # the namespace prefix differs between L1 ('s1sarl1') and L2 ('s1sarl2') products
        key = 's1sarl1'
        obj_prod = tree.find('.//{}:productType'.format(key), namespaces)
        if obj_prod is None:
            key = 's1sarl2'
            obj_prod = tree.find('.//{}:productType'.format(key), namespaces)
        
        meta['product'] = obj_prod.text
        
        acqmode = tree.find('.//{}:mode'.format(key), namespaces).text
        if acqmode == 'SM':
            # for stripmap scenes the swath name is used as mode, e.g. 'S3'
            meta['acquisition_mode'] = tree.find('.//{}:swath'.format(key), namespaces).text
        else:
            meta['acquisition_mode'] = acqmode
        
        meta['acquisition_time'] = dict(
            [(x, tree.find('.//safe:{}Time'.format(x), namespaces).text) for x in ['start', 'stop']])
        meta['start'], meta['stop'] = (self.parse_date(meta['acquisition_time'][x]) for x in ['start', 'stop'])
        meta['coordinates'] = [tuple([float(y) for y in x.split(',')][::-1]) for x in
                               tree.find('.//gml:coordinates', namespaces).text.split()]
        meta['orbit'] = tree.find('.//s1:pass', namespaces).text[0]
        
        meta['orbitNumber_abs'] = int(tree.find('.//safe:orbitNumber[@type="start"]', namespaces).text)
        meta['orbitNumber_rel'] = int(tree.find('.//safe:relativeOrbitNumber[@type="start"]', namespaces).text)
        meta['cycleNumber'] = int(tree.find('.//safe:cycleNumber', namespaces).text)
        meta['frameNumber'] = int(tree.find('.//{}:missionDataTakeID'.format(key), namespaces).text)
        
        meta['orbitNumbers_abs'] = dict(
            [(x, int(tree.find('.//safe:orbitNumber[@type="{0}"]'.format(x), namespaces).text)) for x in
             ['start', 'stop']])
        meta['orbitNumbers_rel'] = dict(
            [(x, int(tree.find('.//safe:relativeOrbitNumber[@type="{0}"]'.format(x), namespaces).text)) for x in
             ['start', 'stop']])
        key_pol = './/{}:transmitterReceiverPolarisation'.format(key)
        meta['polarizations'] = [x.text for x in tree.findall(key_pol, namespaces)]
        meta['category'] = tree.find('.//{}:productClass'.format(key), namespaces).text
        
        family = tree.find('.//safe:familyName', namespaces).text.replace('ENTINEL-', '')
        number = tree.find('.//safe:number', namespaces).text
        meta['sensor'] = family + number
        
        meta['IPF_version'] = float(tree.find('.//safe:software', namespaces).attrib['version'])
        sliced = tree.find('.//{}:sliceProductFlag'.format(key), namespaces).text == 'true'
        if sliced:
            meta['sliceNumber'] = int(tree.find('.//{}:sliceNumber'.format(key), namespaces).text)
            meta['totalSlices'] = int(tree.find('.//{}:totalSlices'.format(key), namespaces).text)
        else:
            meta['sliceNumber'] = None
            meta['totalSlices'] = None
        
        if meta['product'] == 'OCN':
            # OCN products carry no raster geometry information
            meta['spacing'] = -1
            meta['samples'] = -1
            meta['lines'] = -1
        else:
            annotations = self._get_annotations()
            ann_trees = []
            for ann in annotations:
                with self.getFileObj(ann) as ann_xml:
                    ann_trees.append(ET.fromstring(ann_xml.read()))
            
            sp_rg = [float(x.find('.//rangePixelSpacing').text) for x in ann_trees]
            sp_az = [float(x.find('.//azimuthPixelSpacing').text) for x in ann_trees]
            meta['spacing'] = (median(sp_rg), median(sp_az))
            looks_rg = [float(x.find('.//rangeProcessing/numberOfLooks').text) for x in ann_trees]
            looks_az = [float(x.find('.//azimuthProcessing/numberOfLooks').text) for x in ann_trees]
            meta['looks'] = (median(looks_rg), median(looks_az))
            samples = [x.find('.//imageAnnotation/imageInformation/numberOfSamples').text for x in ann_trees]
            meta['samples'] = sum([int(x) for x in samples])
            lines = [x.find('.//imageAnnotation/imageInformation/numberOfLines').text for x in ann_trees]
            meta['lines'] = sum([int(x) for x in lines])
            heading = median(float(x.find('.//platformHeading').text) for x in ann_trees)
            # normalize the heading to the range [0, 360)
            meta['heading'] = heading if heading > 0 else heading + 360
            incidence = [float(x.find('.//incidenceAngleMidSwath').text) for x in ann_trees]
            meta['incidence'] = median(incidence)
            meta['image_geometry'] = ann_trees[0].find('.//projection').text.replace(' ', '_').upper()
        
        return meta
    
    def unpack(self, directory, overwrite=False, exist_ok=False):
        """
        Unpack the scene into a subdirectory named after the SAFE folder.
        """
        outdir = os.path.join(directory, os.path.basename(self.file))
        self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)
class TSX(ID):
    """
    Handler class for TerraSAR-X and TanDEM-X data
    
    Sensors:
        * TSX1
        * TDX1
    
    References:
        * TX-GS-DD-3302 TerraSAR-X Basic Product Specification Document
        * TX-GS-DD-3303 TerraSAR-X Experimental Product Description
        * TD-GS-PS-3028 TanDEM-X Experimental Product Description
        * TerraSAR-X Image Product Guide (Airbus Defence and Space)
    
    Acquisition modes:
        * ST:    Staring Spotlight
        * HS:    High Resolution SpotLight
        * HS300: High Resolution SpotLight 300 MHz
        * SL:    SpotLight
        * SM:    StripMap
        * SC:    ScanSAR
        * WS:    Wide ScanSAR
    
    Polarisation modes:
        * Single (S): all acquisition modes
        * Dual   (D): High Resolution SpotLight (HS), SpotLight (SL) and StripMap (SM)
        * Twin   (T): StripMap (SM) (experimental)
        * Quad   (Q): StripMap (SM) (experimental)
    
    Products:
        * SSC: Single Look Slant Range Complex
        * MGD: Multi Look Ground Range Detected
        * GEC: Geocoded Ellipsoid Corrected
        * EEC: Enhanced Ellipsoid Corrected
    """
    
    def __init__(self, scene):
        # a string scene name triggers a full metadata scan; anything else
        # (e.g. the ready-made metadata dict passed by the TDM subclass)
        # skips the scan and is only registered via the ID base class
        if isinstance(scene, str):
            self.scene = os.path.realpath(scene)
            
            self.pattern = patterns.tsx
            
            self.pattern_ds = r'^IMAGE_(?P<pol>HH|HV|VH|VV)_(?:SRA|FWD|AFT)_(?P<beam>[^\.]+)\.(cos|tif)$'
            self.examine(include_folders=False)
            
            if not re.match(re.compile(self.pattern), os.path.basename(self.file)):
                raise RuntimeError('folder does not match TSX scene naming convention')
            
            self.meta = self.scanMetadata()
            self.meta['projection'] = crsConvert(4326, 'wkt')
        
        super(TSX, self).__init__(self.meta)
    
    def scanMetadata(self):
        """
        Read the main annotation XML and the GEOREF.xml geolocation grid and
        return a standardized metadata dictionary.
        """
        annotation = self.getFileObj(self.file).getvalue()
        namespaces = getNamespaces(annotation)
        tree = ET.fromstring(annotation)
        meta = dict()
        meta['sensor'] = tree.find('.//generalHeader/mission', namespaces).text.replace('-', '')
        meta['product'] = tree.find('.//orderInfo/productVariant', namespaces).text
        meta['orbit'] = tree.find('.//missionInfo/orbitDirection', namespaces).text[0]
        meta['polarizations'] = [x.text for x in
                                 tree.findall('.//acquisitionInfo/polarisationList/polLayer', namespaces)]
        
        meta['orbitNumber_abs'] = int(tree.find('.//missionInfo/absOrbit', namespaces).text)
        meta['orbitNumber_rel'] = int(tree.find('.//missionInfo/relOrbit', namespaces).text)
        meta['cycleNumber'] = int(tree.find('.//missionInfo/orbitCycle', namespaces).text)
        meta['frameNumber'] = int(tree.find('.//inputData/uniqueDataTakeID', namespaces).text)
        
        meta['acquisition_mode'] = tree.find('.//acquisitionInfo/imagingMode', namespaces).text
        meta['start'] = self.parse_date(tree.find('.//sceneInfo/start/timeUTC', namespaces).text)
        meta['stop'] = self.parse_date(tree.find('.//sceneInfo/stop/timeUTC', namespaces).text)
        spacing_row = float(tree.find('.//imageDataInfo/imageRaster/rowSpacing', namespaces).text)
        spacing_col = float(tree.find('.//imageDataInfo/imageRaster/columnSpacing', namespaces).text)
        meta['spacing'] = (spacing_col, spacing_row)
        meta['samples'] = int(tree.find('.//imageDataInfo/imageRaster/numberOfColumns', namespaces).text)
        meta['lines'] = int(tree.find('.//imageDataInfo/imageRaster/numberOfRows', namespaces).text)
        rlks = float(tree.find('.//imageDataInfo/imageRaster/rangeLooks', namespaces).text)
        azlks = float(tree.find('.//imageDataInfo/imageRaster/azimuthLooks', namespaces).text)
        meta['looks'] = (rlks, azlks)
        meta['incidence'] = float(tree.find('.//sceneInfo/sceneCenterCoord/incidenceAngle', namespaces).text)
        
        # read the scene footprint from the geolocation grid
        geocs = self.getFileObj(self.findfiles('GEOREF.xml')[0]).getvalue()
        tree = ET.fromstring(geocs)
        pts = tree.findall('.//gridPoint')
        lat = [float(x.find('lat').text) for x in pts]
        lon = [float(x.find('lon').text) for x in pts]
        # shift lon in case of west direction.
        lon = [x - 360 if x > 180 else x for x in lon]
        meta['coordinates'] = list(zip(lon, lat))
        
        return meta
    
    def unpack(self, directory, overwrite=False, exist_ok=False):
        """
        Unpack the scene into a subdirectory named after the main annotation file.
        """
        match = self.findfiles(self.pattern, True)
        header = [x for x in match if not x.endswith('xml') and 'iif' not in x][0].replace(self.scene, '').strip('/')
        outdir = os.path.join(directory, os.path.basename(header))
        self._unpack(outdir, offset=header, overwrite=overwrite, exist_ok=exist_ok)
class TDM(TSX):
    """
    Handler class for TerraSAR-X and TanDEM-X experimental data
    
    Sensors:
        * TDM1
    
    References:
        * TD-GS-PS-3028 TanDEM-X Experimental Product Description
    
    Acquisition modes:
        * HS: High Resolution SpotLight
        * SL: SpotLight
        * SM: StripMap
    
    Polarisation modes:
        * Single (S): all acquisition modes
        * Dual   (D): High Resolution SpotLight (HS), SpotLight (SL) and StripMap (SM)
        * Twin   (T): StripMap (SM) (experimental)
        * Quad   (Q): StripMap (SM) (experimental)
    
    Products:
        * CoSSCs: (bi-static) SAR co-registered single look slant range complex products (CoSSCs)
    
    Examples
    ----------
    Ingest all Tandem-X Bistatic scenes in a directory and its sub-directories into the database:
    
    >>> from pyroSAR import Archive, identify
    >>> from spatialist.ancillary import finder
    >>> dbfile = '/.../scenelist.db'
    >>> archive_tdm = '/.../TDM/'
    >>> scenes_tdm = finder(archive_tdm, [r'^TDM1.*'], foldermode=2, regex=True, recursive=True)
    >>> with Archive(dbfile) as archive:
    >>>     archive.insert(scenes_tdm)
    """
    
    def __init__(self, scene):
        self.scene = os.path.realpath(scene)
        
        self.pattern = patterns.tdm
        
        self.pattern_ds = r'^IMAGE_(?P<pol>HH|HV|VH|VV)_(?:SRA|FWD|AFT)_(?P<beam>[^\.]+)\.(cos|tif)$'
        self.examine(include_folders=False)
        
        if not re.match(re.compile(self.pattern), os.path.basename(self.file)):
            raise RuntimeError('folder does not match TDM scene naming convention')
        
        self.meta = self.scanMetadata()
        self.meta['projection'] = crsConvert(4326, 'wkt')
        
        # the TSX initializer skips the scan for non-string input and only
        # registers the metadata via the ID base class
        super(TDM, self).__init__(self.meta)
    
    def scanMetadata(self):
        """
        Read the CoSSC annotation XML plus the annotations of the two
        contained TSX/TDX component scenes and return a standardized
        metadata dictionary.
        """
        annotation = self.getFileObj(self.file).getvalue()
        namespaces = getNamespaces(annotation)
        tree = ET.fromstring(annotation)
        meta = dict()
        meta['sensor'] = tree.find('.//commonAcquisitionInfo/missionID', namespaces).text.replace('-', '')
        meta['product'] = tree.find('.//productInfo/productType', namespaces).text
        # satellite IDs; both keys are replaced below by the full metadata
        # dictionaries of the respective component scenes
        meta['SAT1'] = tree.find('.//commonAcquisitionInfo/satelliteIDsat1', namespaces).text
        meta['SAT2'] = tree.find('.//commonAcquisitionInfo/satelliteIDsat2', namespaces).text
        meta['inSARmasterID'] = tree.find('.//commonAcquisitionInfo/inSARmasterID', namespaces).text
        pattern = './/commonAcquisitionInfo/satelliteID{}'.format(meta['inSARmasterID'].lower())
        meta['inSARmaster'] = tree.find(pattern, namespaces).text.replace('-', '')
        pattern = './/commonAcquisitionInfo/operationsInfo/acquisitionItemID'
        meta['acquisitionItemID'] = int(tree.find(pattern, namespaces).text)
        
        meta['effectiveBaseline'] = float(tree.find('.//acquisitionGeometry/effectiveBaseline', namespaces).text)
        meta['heightOfAmbiguity'] = float(tree.find('.//acquisitionGeometry/heightOfAmbiguity', namespaces).text)
        meta['distanceActivePos'] = float(tree.find('.//acquisitionGeometry/distanceActivePos', namespaces).text)
        meta['distanceTracks'] = float(tree.find('.//acquisitionGeometry/distanceTracks', namespaces).text)
        
        meta['cooperativeMode'] = tree.find('.//commonAcquisitionInfo/cooperativeMode', namespaces).text
        meta['bistatic'] = meta['cooperativeMode'].lower() == 'bistatic'
        
        meta['orbit'] = tree.find('.//acquisitionGeometry/orbitDirection', namespaces).text[0]
        
        # locate and scan the two component scenes
        pattern = ".//productComponents/component[@componentClass='imageData']/file/location/name"
        elements = tree.findall(pattern)
        self.primary_scene = os.path.join(self.scene, elements[0].text)
        self.secondary_scene = os.path.join(self.scene, elements[1].text)
        meta["SAT1"] = TSX(self.primary_scene).scanMetadata()
        meta["SAT2"] = TSX(self.secondary_scene).scanMetadata()
        
        meta['start'] = self.parse_date(tree.find('.//orbitHeader/firstStateTime/firstStateTimeUTC', namespaces).text)
        meta['stop'] = self.parse_date(tree.find('.//orbitHeader/lastStateTime/lastStateTimeUTC', namespaces).text)
        meta['samples'] = int(tree.find('.//coregistration/coregRaster/samples', namespaces).text)
        meta['lines'] = int(tree.find('.//coregistration/coregRaster/lines', namespaces).text)
        rlks = float(tree.find('.//processingInfo/inSARProcessing/looks/range', namespaces).text)
        azlks = float(tree.find('.//processingInfo/inSARProcessing/looks/azimuth', namespaces).text)
        meta['looks'] = (rlks, azlks)
        meta['incidence'] = float(tree.find('.//commonSceneInfo/sceneCenterCoord/incidenceAngle', namespaces).text)
        
        # adopt the standardized attributes of the InSAR master scene,
        # deliberately overriding the values read above
        meta['orbit'] = meta[meta['inSARmasterID']]['orbit']
        meta['polarizations'] = meta[meta['inSARmasterID']]['polarizations']
        meta['orbitNumber_abs'] = meta[meta['inSARmasterID']]['orbitNumber_abs']
        meta['orbitNumber_rel'] = meta[meta['inSARmasterID']]['orbitNumber_rel']
        meta['cycleNumber'] = meta[meta['inSARmasterID']]['cycleNumber']
        meta['frameNumber'] = meta[meta['inSARmasterID']]['frameNumber']
        meta['acquisition_mode'] = meta[meta['inSARmasterID']]['acquisition_mode']
        meta['start'] = meta[meta['inSARmasterID']]['start']
        meta['stop'] = meta[meta['inSARmasterID']]['stop']
        meta['spacing'] = meta[meta['inSARmasterID']]['spacing']
        meta['samples'] = meta[meta['inSARmasterID']]['samples']
        meta['lines'] = meta[meta['inSARmasterID']]['lines']
        meta['looks'] = meta[meta['inSARmasterID']]['looks']
        meta['incidence'] = meta[meta['inSARmasterID']]['incidence']
        
        # read the scene footprint from the corner coordinates
        pts = tree.findall('.//sceneCornerCoord')
        lat = [float(x.find('lat').text) for x in pts]
        lon = [float(x.find('lon').text) for x in pts]
        # shift lon in case of west direction.
        lon = [x - 360 if x > 180 else x for x in lon]
        meta['coordinates'] = list(zip(lon, lat))
        
        return meta
def getFileObj(scene, filename):
    """
    Load a file in a SAR scene archive into a readable file object.
    
    Parameters
    ----------
    scene: str
        the scene archive. Can be either a directory or a compressed archive of type `zip` or `tar.gz`.
    filename: str
        the name of a file in the scene archive, easiest to get with method :meth:`~ID.findfiles`
    
    Returns
    -------
    ~io.BytesIO
        a file object
    """
    # derive the archive-internal member name by removing the archive path prefix
    membername = filename.replace(scene, '').strip(r'\/')
    
    if not os.path.exists(scene):
        raise RuntimeError('scene does not exist')
    
    obj = BytesIO()
    if os.path.isdir(scene) or (os.path.isfile(scene) and scene == filename):
        # scene is an unpacked directory, or the scene consists of a single file
        with open(filename, 'rb') as infile:
            obj.write(infile.read())
    elif zf.is_zipfile(scene):
        # context manager ensures the archive handle is closed (was left open before)
        with zf.ZipFile(scene, 'r') as archive:
            obj.write(archive.open(membername).read())
    elif tf.is_tarfile(scene):
        # mode 'r' auto-detects the compression (gz, bz2, xz or plain tar);
        # the previous 'r:gz' failed for any tar archive that was not gzipped
        with tf.open(scene, 'r') as archive:
            obj.write(archive.extractfile(membername).read())
    else:
        raise RuntimeError('input must be either a file name or a location in an zip or tar archive')
    obj.seek(0)
    return obj
def parse_date(x):
    """
    this function gathers known time formats provided in the different SAR products and converts them to a common
    standard of the form YYYYMMDDTHHMMSS
    
    Parameters
    ----------
    x: str or ~datetime.datetime
        the time stamp to be converted
    
    Returns
    -------
    str
        the converted time stamp in format YYYYmmddTHHMMSS
    """
    if isinstance(x, datetime):
        return x.strftime('%Y%m%dT%H%M%S')
    if not isinstance(x, str):
        raise ValueError('input must be either a string or a datetime object')
    # candidate formats observed across the supported SAR products
    candidates = ('%d-%b-%Y %H:%M:%S.%f',
                  '%Y%m%d%H%M%S%f',
                  '%Y-%m-%dT%H:%M:%S.%f',
                  '%Y-%m-%dT%H:%M:%S.%fZ',
                  '%Y%m%d %H:%M:%S.%f')
    for fmt in candidates:
        try:
            parsed = strptime(x, fmt)
        except (TypeError, ValueError):
            continue
        return strftime('%Y%m%dT%H%M%S', parsed)
    raise ValueError('unknown time format; check function parse_date')
================================================
FILE: pyroSAR/examine.py
================================================
###############################################################################
# Examination of SAR processing software
# Copyright (c) 2019-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
import json
import os
import shutil
import re
import warnings
import platform
import subprocess as sp
import importlib.resources
from pyroSAR.config import ConfigHandler
from spatialist.ancillary import finder, run
import logging
# module-level logger
log = logging.getLogger(__name__)

# shared handler for the pyroSAR configuration file; all classes in this
# module read and write their persisted settings through this single instance
__config__ = ConfigHandler()
class ExamineSnap(object):
    """
    Class to check if ESA SNAP is installed.
    Upon initialization, this class searches for relevant binaries and the accompanying
    relative directory structure, which uniquely identify an ESA SNAP installation on a system.
    First, all relevant file and folder names are read from the pyroSAR config file if it exists
    and their existence is verified.
    If this fails, a system check is performed to find relevant binaries in the system PATH variable and
    additional files and folders relative to them.
    In case SNAP is not installed, a default `snap.auxdata.properties` file delivered with pyroSAR will be copied to
    `$HOME/.snap/etc` so that SNAP download URLS and local directory structure can be adapted by other software.
    SNAP configuration can be read and modified via the attribute `snap_properties` of type
    :class:`~pyroSAR.examine.SnapProperties` or the properties :attr:`~pyroSAR.examine.ExamineSnap.userpath` and
    :attr:`~pyroSAR.examine.ExamineSnap.auxdatapath`.
    """
    # class-level cache of SNAP module version information so that the slow
    # SNAP subprocess call in __read_version_dict runs at most once per session
    _version_dict = None
    
    def __init__(self):
        # update legacy config files
        if 'OUTPUT' in __config__.sections:
            __config__.remove_section('OUTPUT')
        if 'SNAP' in __config__.sections:
            snap_keys = __config__.keys('SNAP')
            for key in ['auxdata', 'auxdatapath', 'properties']:
                if key in snap_keys:
                    __config__.remove_option(section='SNAP', key=key)
        
        # define some attributes which identify SNAP
        self.identifiers = ['path', 'gpt', 'etc']
        
        # a list of relevant sections
        self.sections = ['SNAP', 'SNAP_SUFFIX']
        
        # set attributes path, gpt, etc, __suffices
        self.__read_config()
        
        # if SNAP could not be identified from the config attributes, do a system search for it
        # sets attributes path, gpt, etc
        if not self.__is_identified():
            log.debug('identifying SNAP')
            self.__identify_snap()
        
        # if SNAP cannot be identified, copy the snap.auxdata.properties file to $HOME/.snap/etc
        if not self.__is_identified():
            self.etc = os.path.join(os.path.expanduser('~'), '.snap', 'etc')
            os.makedirs(self.etc, exist_ok=True)
            dst = os.path.join(self.etc, 'snap.auxdata.properties')
            if not os.path.isfile(dst):
                dir_data = importlib.resources.files('pyroSAR') / 'snap' / 'data'
                src = str(dir_data / 'snap.auxdata.properties')
                log.debug(f'creating {dst}')
                shutil.copyfile(src, dst)
        
        # if the SNAP suffices attribute was not yet identified,
        # point it to the default file delivered with pyroSAR.
        # fix: the attribute is assigned as self.__suffices inside this class and
        # is therefore name-mangled to '_ExamineSnap__suffices'; checking for the
        # literal '__suffices' was always False, so the default file silently
        # overwrote any suffices read from the config
        if not hasattr(self, '_ExamineSnap__suffices'):
            dir_data = importlib.resources.files('pyroSAR') / 'snap' / 'data'
            fname_suffices = str(dir_data / 'snap.suffices.properties')
            with open(fname_suffices, 'r') as infile:
                content = infile.read().split('\n')
            self.__suffices = {k: v for k, v in [x.split('=') for x in content]}
        
        # SNAP property read/modification interface
        self.snap_properties = SnapProperties(path=os.path.dirname(self.etc))
        
        # update the config file: this scans for config changes and re-writes the config file if any are found
        self.__update_config()
    
    def __getattr__(self, item):
        # only invoked when regular attribute lookup fails
        if item in ['path', 'gpt']:
            msg = ('SNAP could not be identified. If you have installed it '
                   'please add the path to the SNAP executables (bin subdirectory) '
                   'to the PATH environment. E.g. in the Linux .bashrc file add '
                   'the following line:\nexport PATH=$PATH:path/to/snap/bin')
        else:
            msg = "'ExamineSnap' object has no attribute '{}'".format(item)
        raise AttributeError(msg)
    
    def __is_identified(self):
        """
        Check if SNAP has been properly identified, i.e. all paths in `self.identifiers`
        have been detected and confirmed.
        
        Returns
        -------
        bool
        """
        return sum([hasattr(self, x) for x in self.identifiers]) == len(self.identifiers)
    
    def __identify_snap(self):
        """
        do a comprehensive search for an ESA SNAP installation
        
        Returns
        -------
        bool
            has the SNAP properties file been changed?
        """
        # create a list of possible SNAP executables
        defaults = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
        paths = os.environ['PATH'].split(os.path.pathsep)
        options = [os.path.join(path, option) for path in paths for option in defaults]
        options = [x for x in options if os.path.isfile(x)]
        
        if not hasattr(self, 'path') or not os.path.isfile(self.path):
            executables = options
        else:
            executables = [self.path] + options
        
        if len(executables) == 0:
            log.debug("could not detect any potential 'snap' executables")
        
        # for each possible SNAP executable, check whether additional files and directories exist relative to it
        # to confirm whether it actually is an ESA SNAP installation or something else like e.g. the Ubuntu App Manager
        for path in executables:
            log.debug('checking candidate {}'.format(path))
            if os.path.islink(path):
                path = os.path.realpath(path)
            
            # check whether a directory etc exists relative to the SNAP executable
            etc = os.path.join(os.path.dirname(os.path.dirname(path)), 'etc')
            if not os.path.isdir(etc):
                log.debug("could not find the 'etc' directory")
                continue
            
            # check the content of the etc directory.
            # fix: a 'continue' inside an inner loop over 'expected' previously only
            # skipped to the next expected file name instead of rejecting the candidate
            config_files = os.listdir(etc)
            expected = ['snap.auxdata.properties', 'snap.clusters',
                        'snap.conf', 'snap.properties']
            missing = [name for name in expected if name not in config_files]
            if missing:
                for name in missing:
                    log.debug(f"could not find the '{name}' file")
                continue
            
            # identify the gpt executable
            gpt_candidates = finder(os.path.dirname(path), ['gpt', 'gpt.exe'])
            if len(gpt_candidates) == 0:
                log.debug("could not find the 'gpt' executable")
                continue
            else:
                gpt = gpt_candidates[0]
            
            self.path = path
            self.etc = etc
            self.gpt = gpt
            return
    
    def __read_config(self):
        """
        This method reads the config.ini to examine the snap paths.
        If the snap paths are not in the config.ini or the paths are
        wrong they will be automatically created.
        
        Returns
        -------
        """
        for attr in self.identifiers:
            self.__read_config_attr(attr, section='SNAP')
        
        suffices = {}
        if 'SNAP_SUFFIX' in __config__.sections:
            suffices = __config__['SNAP_SUFFIX']
        if len(suffices.keys()) > 0:
            self.__suffices = suffices
    
    def __read_config_attr(self, attr, section):
        """
        read an attribute from the config file and set it as an object attribute
        
        Parameters
        ----------
        attr: str
            the attribute name
        section: str
            the config section to read the attribute from
        
        Returns
        -------
        """
        if section in __config__.sections:
            if attr in __config__[section].keys():
                val = __config__[section][attr]
                # only accept the configured value if the path still exists
                if os.path.exists(val):
                    setattr(self, attr, val)
    
    def __read_version_dict(self):
        """
        call SNAP to list its installed modules and parse the output into a
        dictionary of {module code: {'version': ..., 'state': ...}}
        """
        log.debug('reading SNAP version information')
        out = {}
        cmd = [self.path, '--nosplash', '--nogui', '--modules',
               '--list', '--refresh']
        if platform.system() == 'Windows':
            cmd.extend(['--console', 'suppress'])
        # fix Exception in thread "main" java.awt.AWTError: Can't connect to
        # X11 window server using 'xyz' as the value of the DISPLAY variable.
        env = os.environ.copy()
        env['DISPLAY'] = ''
        proc = sp.Popen(args=cmd, stdout=sp.PIPE, stderr=sp.STDOUT,
                        text=True, encoding='utf-8', bufsize=1,
                        env=env)
        # the module table is delimited by lines starting with '---';
        # stop reading once the second delimiter has been seen
        counter = 0
        lines = []
        lines_info = []
        for line in proc.stdout:
            line = line.rstrip()
            lines.append(line)
            if line.startswith('---'):
                counter += 1
            else:
                if counter == 1:
                    lines_info.append(line)
            if counter == 2:
                proc.terminate()
        proc.wait()
        pattern = r'([a-z.]*)\s+([0-9.]+)\s+(.*)'
        for line in lines_info:
            code, version, state = re.search(pattern=pattern, string=line).groups()
            out[code] = {'version': version, 'state': state}
        if len(out) == 0:
            snap_msg = "\n".join(lines)
            raise RuntimeError(f'{snap_msg}\ncould not '
                               f'read SNAP version information')
        return out
    
    def __update_config(self):
        # write all identified attributes and suffices back to the config file
        for section in self.sections:
            if section not in __config__.sections:
                __config__.add_section(section)
        
        for key in self.identifiers:
            if hasattr(self, key):
                self.__update_config_attr(key, getattr(self, key), 'SNAP')
        
        for key in sorted(self.__suffices.keys()):
            self.__update_config_attr(key, self.__suffices[key], 'SNAP_SUFFIX')
    
    @staticmethod
    def __update_config_attr(attr, value, section):
        # only write if the value is new or has changed
        if isinstance(value, list):
            value = json.dumps(value)
        
        if attr not in __config__[section].keys() or __config__[section][attr] != value:
            __config__.set(section, key=attr, value=value, overwrite=True)
    
    def get_suffix(self, operator):
        """
        get the file name suffix for an operator
        
        Parameters
        ----------
        operator: str
            the name of the operator
        
        Returns
        -------
        str or None
            the file suffix or None if unknown
        
        Examples
        --------
        >>> from pyroSAR.examine import ExamineSnap
        >>> config = ExamineSnap()
        >>> print(config.get_suffix('Terrain-Flattening'))
        'TF'
        """
        if operator in self.__suffices.keys():
            return self.__suffices[operator]
        else:
            return None
    
    def get_version(self, module: str) -> str:
        """
        Read the version and date of different SNAP modules.
        The following SNAP command is called to get the information:
        
        .. code-block:: bash
        
            snap --nosplash --nogui --modules --list --refresh --console suppress
        
        Parameters
        ----------
        module:
            one of the following
            
            - core
            - desktop
            - rstb
            - opttbx
            - microwavetbx
        
        Returns
        -------
            the version number
        """
        if ExamineSnap._version_dict is None:
            ExamineSnap._version_dict = self.__read_version_dict()
        log.debug(f"reading version information for module '{module}'")
        patterns = {'core': 'org.esa.snap.snap.core',
                    'desktop': 'org.esa.snap.snap.ui',
                    'rstb': 'org.csa.rstb.rstb.kit',
                    'opttbx': 'eu.esa.opt.opttbx.kit',
                    'microwavetbx': 'eu.esa.microwavetbx.microwavetbx.kit'}
        if module not in patterns.keys():
            raise ValueError(f"'{module}' is not a valid module name. "
                             f"Supported options: {patterns.keys()}")
        for k, v in ExamineSnap._version_dict.items():
            if patterns[module] == k:
                # state 'Available' means the module can be installed but is not
                if v['state'] == 'Available':
                    raise RuntimeError(f'{module} is not installed')
                log.debug(f'version is {v["version"]}')
                return v['version']
        raise RuntimeError(f"Could not find version "
                           f"information for module '{module}'.")
    
    @property
    def auxdatapath(self):
        """
        Get/set the SNAP configuration for `AuxDataPath` in `snap.auxdata.properties`.
        
        Example
        -------
        >>> from pyroSAR.examine import ExamineSnap
        >>> config = ExamineSnap()
        >>> config.auxdatapath = '/path/to/snap/auxdata'
        # This is equivalent to
        >>> config.snap_properties['AuxDataPath'] = '/path/to/snap/auxdata'
        """
        out = self.snap_properties['AuxDataPath']
        if out is None:
            # default location relative to the SNAP user path
            out = os.path.join(self.userpath, 'auxdata')
        return out
    
    @auxdatapath.setter
    def auxdatapath(self, value):
        self.snap_properties['AuxDataPath'] = value
    
    @property
    def userpath(self):
        """
        Get/set the SNAP configuration for `snap.userdir` in `snap.properties`.
        
        Example
        -------
        >>> from pyroSAR.examine import ExamineSnap
        >>> config = ExamineSnap()
        >>> config.userpath = '/path/to/snap/data'
        # This is equivalent to
        >>> config.snap_properties['snap.userdir'] = '/path/to/snap/data'
        """
        return self.snap_properties.userpath
    
    @userpath.setter
    def userpath(self, value):
        self.snap_properties.userpath = value
class ExamineGamma(object):
    """
    Class to check if GAMMA is installed.
    
    Examples
    --------
    >>> from pyroSAR.examine import ExamineGamma
    >>> config = ExamineGamma()
    >>> print(config.home)
    >>> print(config.version)
    """
    
    def __init__(self):
        home_sys = os.environ.get('GAMMA_HOME')
        if home_sys is not None and not os.path.isdir(home_sys):
            warnings.warn('found GAMMA_HOME environment variable, but directory does not exist')
            home_sys = None
        # sets self.fname and any attributes stored in the GAMMA config section
        self.__read_config()
        if hasattr(self, 'home'):
            if home_sys is not None and self.home != home_sys:
                log.info('the value of GAMMA_HOME is different to that in the pyroSAR configuration;\n'
                         ' was: {}\n'
                         ' is : {}\n'
                         'resetting the configuration and deleting parsed modules'
                         .format(self.home, home_sys))
                parsed = os.path.join(os.path.dirname(self.fname), 'gammaparse')
                # ignore_errors: the parsed module directory might not exist yet
                shutil.rmtree(parsed, ignore_errors=True)
                self.home = home_sys
        if not hasattr(self, 'home'):
            if home_sys is not None:
                setattr(self, 'home', home_sys)
            else:
                raise RuntimeError('could not read GAMMA installation directory')
        # fix: the named group 'version' is required by .group('version') below;
        # it was missing from the pattern, making the regex invalid
        self.version = re.search('GAMMA_SOFTWARE[-/](?P<version>[0-9]{8})',
                                 getattr(self, 'home')).group('version')
        try:
            returncode, out, err = run(['which', 'gdal-config'], void=False)
            gdal_config = out.strip('\n')
            self.gdal_config = gdal_config
        except sp.CalledProcessError:
            raise RuntimeError('could not find command gdal-config.')
        self.__update_config()
    
    def __read_config(self):
        # read all keys of the GAMMA config section as object attributes
        self.fname = __config__.file
        if 'GAMMA' in __config__.sections:
            attr = __config__['GAMMA']
            for key, value in attr.items():
                setattr(self, key, value)
    
    def __update_config(self):
        # persist the identified installation parameters in the config file
        if 'GAMMA' not in __config__.sections:
            __config__.add_section('GAMMA')
        
        for attr in ['home', 'version']:
            self.__update_config_attr(attr, getattr(self, attr), 'GAMMA')
    
    @staticmethod
    def __update_config_attr(attr, value, section):
        # only write if the value is new or has changed
        if isinstance(value, list):
            value = json.dumps(value)
        
        if attr not in __config__[section].keys() or __config__[section][attr] != value:
            __config__.set(section, key=attr, value=value, overwrite=True)
class SnapProperties(object):
    """
    SNAP configuration interface. This class enables reading and modifying
    SNAP configuration in properties files. Modified properties are directly
    written to the files.
    Currently, the files `snap.properties`, `snap.auxdata.properties` and `snap.conf`
    are supported. These files can be found in two locations:
    
    - `<SNAP installation directory>/etc`
    - `<HOME>/.snap/etc`
    
    Configuration in the latter has higher priority, and modified properties will
    always be written there so that the installation directory is not modified.
    
    Parameters
    ----------
    path: str
        SNAP installation directory path
    
    Examples
    --------
    >>> from pyroSAR.examine import ExamineSnap, SnapProperties
    >>> path = ExamineSnap().path
    >>> config = SnapProperties(path=path)
    >>> config['snap.userdir'] = '/path/to/snap/auxdata'
    """
    
    def __init__(self, path):
        # fix: the named groups 'comment', 'key' and 'value' are required by
        # _to_dict and __setitem__; without them the patterns are invalid regexes
        self.pattern = r'^(?P<comment>#?)(?P<key>[\w\.]*)[ ]*=[ ]*"?(?P<value>[^"\n]*)"?\n*'
        self.pattern_key_replace = r'#?{}[ ]*=[ ]*(?P<value>.*)'
        
        self.properties_path = os.path.join(path, 'etc', 'snap.properties')
        log.debug(f"reading {self.properties_path}")
        self.properties = self._to_dict(self.properties_path)
        self.properties.update(self._to_dict(self.userpath_properties))
        
        self.auxdata_properties_path = os.path.join(path, 'etc', 'snap.auxdata.properties')
        log.debug(f"reading {self.auxdata_properties_path}")
        self.auxdata_properties = self._to_dict(self.auxdata_properties_path)
        self.auxdata_properties.update(self._to_dict(self.userpath_auxdata_properties))
        
        self.conf_path = os.path.join(path, 'etc', 'snap.conf')
        log.debug(f"reading {self.conf_path}")
        str_split = {'default_options': ' '}
        self.conf = self._to_dict(path=self.conf_path, str_split=str_split)
        self.conf.update(self._to_dict(self.userpath_conf, str_split=str_split))
        
        # lookup order for __getitem__/keys()
        self._dicts = [self.properties, self.auxdata_properties, self.conf]
        
        # removing this because of
        # "RuntimeError: OpenJDK 64-Bit Server VM warning: Options
        # -Xverify:none and -noverify were deprecated in JDK 13 and will
        # likely be removed in a future release."
        if '-J-Xverify:none' in self.conf['default_options']:
            opts = self.conf['default_options'].copy()
            opts.remove('-J-Xverify:none')
            self['default_options'] = opts
        
        # some properties need to be read from the default user path to
        # be visible to SNAP.
        # NOTE(review): both pairs update self.properties; presumably the
        # auxdata pair was meant to update self.auxdata_properties - confirm
        pairs = [(self.userpath_properties, self.properties_path),
                 (self.userpath_auxdata_properties, self.auxdata_properties_path)]
        for default, defined in pairs:
            if default != defined:
                conf = self._to_dict(default)
                if len(conf.keys()) > 0:
                    log.debug(f"updating keys {list(conf.keys())} from {default}")
                    self.properties.update(conf)
    
    def __getitem__(
            self,
            key: str
    ) -> int | float | str | list[str]:
        # return a copy for mutable values so callers cannot modify internal state
        for section in self._dicts:
            if key in section:
                return section[key].copy() \
                    if hasattr(section[key], 'copy') \
                    else section[key]
        raise KeyError(f'could not find key {key}')
    
    def __setitem__(
            self,
            key: str,
            value: int | float | str | list[str] | None
    ) -> None:
        if not (isinstance(value, (int, float, str, list)) or value is None):
            raise TypeError(f'invalid type for key {key}: {type(value)}')
        # fix: only compare against the current value if the key is known;
        # previously self[key] raised a KeyError for new keys so that the
        # 'self.conf[key] = value' fallback below was unreachable
        if key in self.keys():
            current = self[key]
            if value == current and isinstance(value, type(current)):
                return
        if key in self.properties:
            self.properties[key] = value
        elif key in self.auxdata_properties:
            self.auxdata_properties[key] = value
        else:
            self.conf[key] = value
        
        if value is not None:
            if isinstance(value, list):
                value = ' '.join(value)
            # escape special characters for the Java properties file format
            value = str(value).encode('unicode-escape').decode()
            value = value.replace(':', '\\:')
        
        # modified values are always written to the user path files
        if key in self.properties:
            path = self.userpath_properties
        elif key in self.auxdata_properties:
            path = self.userpath_auxdata_properties
        elif key in self.conf:
            path = self.userpath_conf
        else:
            raise KeyError(f'unknown key {key}')
        
        if os.path.isfile(path):
            with open(path, 'r') as f:
                content = f.read()
        else:
            content = ''
        
        pattern = self.pattern_key_replace.format(key)
        match = re.search(pattern, content)
        if match:
            # comment the key out if the value is None, otherwise replace the line
            repl = f'#{key} =' if value is None else f'{key} = {value}'
            content = content.replace(match.group(), repl)
        else:
            content += f'\n{key} = {value}'
        
        os.makedirs(os.path.dirname(path), exist_ok=True)
        log.debug(f"writing key '{key}' to '{path}'")
        with open(path, 'w') as f:
            f.write(content)
    
    def _to_dict(
            self,
            path: str,
            str_split: dict[str, str] | None = None
    ) -> dict[str, int | float | str | None | list[str]]:
        """
        Read a properties file into a dictionary.
        Converts values into basic python types
        
        Parameters
        ----------
        path:
            the path to the properties file
        str_split:
            a dictionary with properties as keys and splitting characters as values
            to split a string into a list of strings
        
        Returns
        -------
        the dictionary with the properties
        """
        out = {}
        if os.path.isfile(path):
            with open(path, 'r') as f:
                for line in f:
                    # the pattern is anchored at the line start so a single
                    # re.match suffices (the line was previously scanned twice)
                    match = re.match(self.pattern, line)
                    if match:
                        comment, key, value = match.groups()
                        if comment == '':
                            if str_split is not None and key in str_split.keys():
                                value = value.split(str_split[key])
                            else:
                                value = self._string_convert(value)
                            out[key] = value
                        else:
                            # commented-out keys are recorded with value None
                            out[key] = None
        return out
    
    @staticmethod
    def _string_convert(string):
        # convert a raw properties value into None/bool/int/float, falling
        # back to an unescaped string
        if string.lower() == 'none':
            return None
        elif string.lower() == 'true':
            return True
        elif string.lower() == 'false':
            return False
        else:
            try:
                return int(string)
            except ValueError:
                try:
                    return float(string)
                except ValueError:
                    return string.replace('\\:', ':').replace('\\\\', '\\')
    
    def keys(self):
        """
        Returns
        -------
        list[str]
            all known SNAP property keys
        """
        keys = []
        for item in self._dicts:
            keys.extend(list(item.keys()))
        return sorted(keys)
    
    @property
    def userpath(self):
        # the SNAP user directory; defaults to ~/.snap if not configured
        key = 'snap.userdir'
        if key not in self.keys() or self[key] is None:
            return os.path.join(os.path.expanduser('~'), '.snap')
        else:
            return self[key]
    
    @userpath.setter
    def userpath(self, value):
        self['snap.userdir'] = value
    
    @property
    def userpath_auxdata_properties(self):
        # user-level snap.auxdata.properties location
        return os.path.join(os.path.expanduser('~'), '.snap',
                            'etc', 'snap.auxdata.properties')
    
    @property
    def userpath_properties(self):
        # user-level snap.properties location
        return os.path.join(os.path.expanduser('~'), '.snap',
                            'etc', 'snap.properties')
    
    @property
    def userpath_conf(self):
        # user-level snap.conf location
        return os.path.join(os.path.expanduser('~'), '.snap',
                            'etc', 'snap.conf')
================================================
FILE: pyroSAR/gamma/__init__.py
================================================
from .auxil import process, ISPPar, UTM, Spacing, Namespace, slc_corners, par2hdr
from .util import calibrate, convert2gamma, correctOSV, geocode, multilook, ovs, S1_deburst
from . import dem
================================================
FILE: pyroSAR/gamma/api.py
================================================
###############################################################################
# import wrapper for the pyroSAR GAMMA API
# Copyright (c) 2018-2019, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
import os
import sys
import warnings
from .parser import autoparse
# Attempt to parse the GAMMA command reference into Python wrapper modules
# and expose them at this module's level. The parsed modules are imported
# from ~/.pyrosar (see the sys.path insertion below).
try:
    autoparse()
    # make the generated 'gammaparse' package importable
    sys.path.insert(0, os.path.join(os.path.expanduser('~'), '.pyrosar'))
    try:
        from gammaparse import *
    except ImportError:
        warnings.warn('found a GAMMA installation directory, but module parsing failed')
except RuntimeError:
    # presumably raised while examining the GAMMA installation (e.g. no
    # GAMMA_HOME and no configured home directory) - the API then stays empty
    warnings.warn('could not find GAMMA installation directory; please set the GAMMA_HOME environment variable')
================================================
FILE: pyroSAR/gamma/auxil.py
================================================
###############################################################################
# general GAMMA utilities
# Copyright (c) 2014-2026, the pyroSAR Developers, Stefan Engelhardt.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
################################################################################
import math
import os
import re
import string
import codecs
import subprocess as sp
from datetime import datetime, timedelta
from pyroSAR.examine import ExamineGamma
from spatialist.ancillary import parse_literal, run, union, dissolve
from spatialist.envi import hdr
from .error import gammaErrorHandler
def do_execute(par, ids, exist_ok):
    """
    small helper function to assess whether a GAMMA command shall be executed.
    
    Parameters
    ----------
    par: dict
        a dictionary containing all arguments for the command
    ids: list[str]
        the IDs of the output files
    exist_ok: bool
        allow existing output files?
    
    Returns
    -------
    bool
        execute the command because (a) not all output files exist or (b) existing files are not allowed
    """
    # if existing outputs are not tolerated, always (re-)execute
    if not exist_ok:
        return True
    # otherwise only execute when at least one expected output is missing;
    # '-' marks outputs that are intentionally not written
    checks = [os.path.isfile(par[name]) for name in ids if par[name] != '-']
    return not all(checks)
class ISPPar(object):
    """
    Reader for ISP parameter files of the GAMMA software package
    This class allows to read all information from files in GAMMA's parameter file format.
    Each key-value pair is parsed and added as attribute. For instance if the parameter file
    contains the pair 'sensor: TSX-1' an attribute named 'sensor' with the value 'TSX-1' will be available.
    The values are converted to native Python types, while unit identifiers like 'dB' or 'Hz' are removed.
    Please see the GAMMA reference manual for further information on the actual file format.
    
    Parameters
    ----------
    filename: str
        the GAMMA parameter file
    
    Examples
    --------
    >>> from pyroSAR.gamma import ISPPar
    >>> with ISPPar('S1A__IW___A_20141115T181801_VH_grd.par') as par:
    ...     print(par)  # print an overview of all available metadata
    ...     print(par.keys)  # print all parameter names
    ...     for key, value in par.envidict().items():
    ...         print('{0}: {1}'.format(key, value))  # print the ENVI HDR compliant metadata
    
    Attributes
    ----------
    keys: list
        the names of all parameters
    """
    # matches 'key: value' lines
    _re_kv_pair = re.compile(r'^(\w+):\s*(.+)\s*')
    # matches a leading float literal in a value item
    _re_float_literal = re.compile(r'^[+-]?(?:(\d*\.\d+)|(\d+\.?))(?:[Ee][+-]?\d+)?')
    
    def __init__(self, filename):
        """Parses an ISP parameter file from disk.
        
        Args:
            filename: The filename or file object representing the ISP parameter file.
        """
        if isinstance(filename, str):
            par_file = open(filename, 'r')
        else:
            par_file = filename
        self.keys = ['filetype']
        try:
            content = par_file.read().split('\n')
        except UnicodeDecodeError:
            # lenient re-read dropping non-printable characters.
            # NOTE(review): this fallback assumes 'filename' is a path string;
            # it will fail if a file object was passed in - confirm
            par_file = codecs.open(filename, 'r', encoding='utf-8', errors='ignore')
            content = par_file.read()
            printable = set(string.printable)
            content = filter(lambda x: x in printable, content)
            content = ''.join(list(content)).split('\n')
        finally:
            par_file.close()
        # the file type is determined from the first line's title
        if 'Image Parameter File' in content[0]:
            setattr(self, 'filetype', 'isp')
        elif 'DEM/MAP parameter file' in content[0]:
            setattr(self, 'filetype', 'dem')
        else:
            raise RuntimeError('unknown parameter file type')
        for line in content:
            match = ISPPar._re_kv_pair.match(line)
            if not match:
                continue  # Skip malformed lines with no key-value pairs
            key = match.group(1)
            items = match.group(2).split()
            if len(items) == 0:
                value = None
            elif len(items) == 1:
                value = parse_literal(items[0])
            else:
                if not ISPPar._re_float_literal.match(items[0]):
                    # Value is a string literal containing whitespace characters
                    value = match.group(2)
                else:
                    # Evaluate each item and stop at the first non-float literal
                    value = []
                    for i in items:
                        match = ISPPar._re_float_literal.match(i)
                        if match:
                            value.append(parse_literal(match.group()))
                        else:
                            # If the first float literal is immediately followed by a non-float literal handle the
                            # first one as singular value, e.g. in '20.0970 dB'
                            if len(value) == 1:
                                value = value[0]
                            break
            self.keys.append(key)
            setattr(self, key, value)
        if hasattr(self, 'date'):
            # the date field is rounded to four digits, so only the day is extracted
            # and then the start_time field is added to be more precise and to avoid
            # rounding to 60 s.
            self.date_dt = datetime(*self.date[:3])
            self.date_dt += timedelta(seconds=self.start_time)
            self.date = self.date_dt.strftime('%Y-%m-%dT%H:%M:%S.%f')
    
    def __enter__(self):
        return self
    
    def __exit__(self, exc_type, exc_val, exc_tb):
        return
    
    def __getattr__(self, item):
        # will only be run if object has no attribute item
        raise AttributeError("parameter file has no attribute '{}'".format(item))
    
    def __str__(self):
        # align all values in one column for readability
        maxlen = len(max(self.keys, key=len)) + 1
        return '\n'.join(['{key}:{sep}{value}'.format(key=key,
                                                      sep=(maxlen - len(key)) * ' ',
                                                      value=getattr(self, key)) for key in self.keys])
    
    def envidict(self, nodata=None):
        """
        export relevant metadata to an ENVI HDR file compliant format
        
        Parameters
        ----------
        nodata: int, float or None
            a no data value to write to the HDR file via attribute 'data ignore value'
        
        Returns
        -------
        dict
            a dictionary containing attributes translated to ENVI HDR naming
        """
        out = dict(bands=1,
                   header_offset=0,
                   file_type='ENVI Standard',
                   interleave='bsq',
                   sensor_type='Unknown',
                   byte_order=1,
                   wavelength_units='Unknown')
        if hasattr(self, 'date'):
            out['acquisition_time'] = self.date + 'Z'
        # dimension keys differ between parameter file flavors
        out['samples'] = getattr(self, union(['width', 'range_samples', 'samples'], self.keys)[0])
        out['lines'] = getattr(self, union(['nlines', 'azimuth_lines', 'lines'], self.keys)[0])
        dtypes_lookup = {'FCOMPLEX': 6, 'FLOAT': 4, 'REAL*4': 4, 'INTEGER*2': 2, 'SHORT': 12}
        dtype = getattr(self, union(['data_format', 'image_format'], self.keys)[0])
        if dtype not in dtypes_lookup.keys():
            raise TypeError('unsupported data type: {}'.format(dtype))
        out['data_type'] = dtypes_lookup[dtype]
        if nodata is not None:
            out['data_ignore_value'] = nodata
        if out['data_type'] == 6:
            out['complex_function'] = 'Power'
        # projections = ['AEAC', 'EQA', 'LCC', 'LCC2', 'OMCH', 'PC', 'PS', 'SCH', 'TM', 'UTM']
        # the corner coordinates are shifted by 1/2 pixel to the Northwest since GAMMA pixel
        # coordinates are defined for the pixel center while in ENVI it is the upper left
        if hasattr(self, 'DEM_projection'):
            if self.DEM_projection == 'UTM':
                hem = 'North' if float(self.false_northing) == 0 else 'South'
                out['map_info'] = ['UTM', '1.0000', '1.0000',
                                   self.corner_east - (abs(self.post_east) / 2),
                                   self.corner_north + (abs(self.post_north) / 2),
                                   str(abs(float(self.post_east))),
                                   str(abs(float(self.post_north))),
                                   self.projection_zone, hem, 'WGS-84', 'units=Meters']
            elif self.DEM_projection == 'EQA':
                out['map_info'] = ['Geographic Lat/Lon', '1.0000', '1.0000',
                                   self.corner_lon - (abs(self.post_lon) / 2),
                                   self.corner_lat + (abs(self.post_lat) / 2),
                                   str(abs(float(self.post_lon))),
                                   str(abs(float(self.post_lat))),
                                   'WGS-84', 'units=Degrees']
            elif self.DEM_projection == 'PS':
                if self.projection_name == 'WGS 84 / Antarctic Polar Stereographic':
                    out['map_info'] = [
                        'EPSG:3031 - WGS 84 / Antarctic Polar Stereographic',
                        '1.0000',
                        '1.0000',
                        self.corner_east - (abs(self.post_east) / 2),
                        self.corner_north + (abs(self.post_north) / 2),
                        str(abs(float(self.post_east))),
                        str(abs(float(self.post_north))),
                        'WGS-84',
                        'units=Meters',
                    ]
                elif self.projection_name == 'WGS 84 / Arctic Polar Stereographic':
                    out['map_info'] = [
                        'EPSG:3995 - WGS 84 / Arctic Polar Stereographic',
                        '1.0000',
                        '1.0000',
                        self.corner_east - (abs(self.post_east) / 2),
                        self.corner_north + (abs(self.post_north) / 2),
                        str(abs(float(self.post_east))),
                        str(abs(float(self.post_north))),
                        'WGS-84',
                        'units=Meters',
                    ]
                else:
                    raise RuntimeError(
                        f'unsupported projection: "{self.DEM_projection}; {self.projection_name}". The projection name "{self.projection_name}" was not recognised. Expected projection names are "WGS 84 / Arctic Polar Stereographic" and "WGS 84 / Antarctic Polar Stereographic". Add support for the required projection name as an ENVI map info output in gamma.auxil.ISPPar.envidict.'
                    )
            else:
                # fix: the 'projection_name' attribute is not guaranteed to exist
                # for unsupported projections; accessing it directly raised an
                # AttributeError that masked the intended RuntimeError
                projection_name = getattr(self, 'projection_name', 'unknown')
                raise RuntimeError(
                    f'unsupported projection: "{self.DEM_projection}; {projection_name}". To resolve, create an ENVI map info output for this projection in gamma.auxil.ISPPar.envidict.'
                )
        return out
class Namespace(object):
    """
    GAMMA file name handler. This improves managing the many file names
    handled when processing with GAMMA.

    Parameters
    ----------
    directory: str
        the directory path where files shall be written.
    basename: str
        the product basename as returned by
        :meth:`pyroSAR.drivers.ID.outname_base`

    Examples
    --------
    >>> n = Namespace(directory='/path', basename='S1A__IW___A_20180829T170631')
    >>> print(n.pix_geo)
    '-'
    >>> n.appreciate(['pix_geo'])
    >>> print(n.pix_geo)
    '/path/S1A__IW___A_20180829T170631_pix_geo'
    """
    
    def __init__(self, directory, basename):
        self.__base = basename
        self.__outdir = directory
        # registry of all (sanitized) keys that were ever appreciated/depreciated
        self.__reg = []
    
    def __getitem__(self, item):
        item = str(item).replace('.', '_')
        return self.get(item)
    
    def __getattr__(self, item):
        # will only be run if the object has no attribute `item`;
        # any unknown name resolves to the GAMMA null token '-'
        return '-'
    
    def appreciate(self, keys):
        """
        assign a file name ({directory}/{basename}_{key}) to each key
        
        Parameters
        ----------
        keys: list[str]
            the keys to activate
        
        Returns
        -------
        """
        for key in keys:
            attr = key.replace('.', '_')
            # the attribute name is sanitized while the file name keeps the raw key
            setattr(self, attr, os.path.join(self.__outdir, self.__base + '_' + key))
            # bugfix: register the sanitized name; the original compared the raw
            # key against the sanitized registry so keys containing '.' were
            # appended repeatedly
            if attr not in self.__reg:
                self.__reg.append(attr)
    
    def depreciate(self, keys):
        """
        reset each key's file name to the GAMMA null token '-'
        
        Parameters
        ----------
        keys: list[str]
            the keys to deactivate
        
        Returns
        -------
        """
        for key in keys:
            attr = key.replace('.', '_')
            setattr(self, attr, '-')
            # bugfix: same raw-vs-sanitized comparison as in appreciate
            if attr not in self.__reg:
                self.__reg.append(attr)
    
    def getall(self):
        # return a dictionary of all registered keys and their current values
        out = {}
        for key in self.__reg:
            out[key] = getattr(self, key)
        return out
    
    def select(self, selection):
        # return the values of the selected keys
        return [getattr(self, key) for key in selection]
    
    def isregistered(self, key):
        # has the key ever been appreciated or depreciated?
        return key in self.__reg
    
    def isappreciated(self, key):
        # is the key registered and currently carrying a file name?
        if self.isregistered(key):
            if self.get(key) != '-':
                return True
        return False
    
    def isfile(self, key):
        # note: hasattr is always True because __getattr__ provides a default;
        # os.path.isfile('-') then returns False for unappreciated keys
        return hasattr(self, key) and os.path.isfile(getattr(self, key))
    
    def get(self, key):
        return getattr(self, key)
def par2hdr(parfile, hdrfile, modifications=None, nodata=None):
    """
    Create an ENVI HDR file from a GAMMA PAR file.
    
    Parameters
    ----------
    parfile: str
        the GAMMA parfile
    hdrfile: str
        the ENVI HDR file
    modifications: dict or None
        a dictionary of values overriding those derived from the parfile
    nodata: int, float or None
        a no data value to write to the HDR file via attribute 'data ignore value'
    
    Returns
    -------
    
    Examples
    --------
    >>> from pyroSAR.gamma.auxil import par2hdr
    >>> par2hdr('dem_seg.par', 'inc.hdr')
    >>> # write a HDR file for byte data based on a parfile of float data
    >>> par2hdr('dem_seg.par', 'ls_map.hdr', modifications={'data_type': 1})
    
    See Also
    --------
    :class:`spatialist.envi.HDRobject`
    :func:`spatialist.envi.hdr`
    """
    # derive the HDR entries from the parfile, apply overrides, then write
    with ISPPar(parfile) as par:
        entries = par.envidict(nodata)
    entries.update(modifications or {})
    hdr(entries, hdrfile)
def process(
        cmd: list[str],
        outdir: str | None = None,
        logfile: str | None = None,
        logpath: str | None = None,
        inlist: list[str] | None = None,
        void: bool = True,
        shellscript: str | None = None
) -> tuple[str, str] | None:
    """
    wrapper function to execute GAMMA commands via module :mod:`subprocess`
    
    Parameters
    ----------
    cmd:
        The command line arguments.
    outdir:
        The directory to execute the command in. This directory is also set
        as environment variable in `shellscript`.
    logfile:
        A file to write the command log to. Takes precedence over
        parameter `logpath`.
    logpath:
        A directory to write logfiles to. The file will be named
        {GAMMA command}.log, e.g. gc_map.log.
        Ignored if `logfile` is set.
    inlist:
        A list of values, which is passed as interactive inputs via `stdin`.
    void:
        Suppress returning the `stdout` and `stderr` messages?
    shellscript:
        A file to write the GAMMA commands to in shell format.
    
    Returns
    -------
    the stdout and stderr messages if void is False, otherwise None
    """
    # determine the log file name: an explicit logfile wins over logpath
    if logfile is not None:
        log = logfile
    else:
        log = os.path.join(logpath, os.path.basename(cmd[0]) + '.log') if logpath else None
    gamma_home = ExamineGamma().home
    if shellscript is not None:
        if not os.path.isfile(shellscript):
            # create an empty file
            with open(shellscript, 'w') as init:
                pass
        # assemble the shell command line for reproducibility logging
        line = ' '.join([str(x) for x in dissolve(cmd)])
        if inlist is not None:
            # pass the interactive inputs via a bash here-string
            line += ' <<< $"{}"'.format('\n'.join([str(x) for x in inlist]) + '\n')
        with open(shellscript, 'r+') as sh:
            content = sh.read()
            sh.seek(0)
            disclaimer = 'This script was created automatically by pyroSAR'
            is_new = re.search(disclaimer, content) is None
            if is_new:
                # prepend the disclaimer header and the GAMMA_HOME variable
                # before the existing content
                ts = datetime.now().strftime('%a %b %d %H:%M:%S %Y')
                sh.write(f'# {disclaimer} on {ts}\n\n')
                sh.write('GAMMA_HOME={}\n\n'.format(gamma_home))
                sh.write(content)
            # substitute absolute paths with shell variables for portability
            line = line.replace(gamma_home, '$GAMMA_HOME')
            if outdir is not None:
                line = line.replace(outdir, '$OUTDIR')
                # (re-)define OUTDIR if it has not been set yet or has changed
                outdirs = re.findall('OUTDIR=(.*)\n', content)
                if len(outdirs) == 0 or outdir != outdirs[-1]:
                    line = f"OUTDIR={outdir}\n\n{line}"
            sh.seek(0, 2)  # set pointer to the end of the file
            sh.write(line + '\n\n')
    # create an environment containing the locations of all GAMMA submodules to be passed to the subprocess calls
    gammaenv = os.environ.copy()
    gammaenv['GAMMA_HOME'] = gamma_home
    returncode, out, err = run([ExamineGamma().gdal_config, '--datadir'], void=False)
    gammaenv['GDAL_DATA'] = out.strip()
    for module in ['DIFF', 'DISP', 'IPTA', 'ISP', 'LAT']:
        loc = os.path.join(gammaenv['GAMMA_HOME'], module)
        if os.path.isdir(loc):
            gammaenv[module + '_HOME'] = loc
            # make the module's executables and scripts discoverable
            for submodule in ['bin', 'scripts']:
                subloc = os.path.join(loc, submodule)
                if os.path.isdir(subloc):
                    gammaenv['PATH'] += os.pathsep + subloc
    # execute the command; errorpass=True defers error handling to gammaErrorHandler
    returncode, out, err = run(cmd, outdir=outdir, logfile=log, inlist=inlist,
                               void=False, errorpass=True, env=gammaenv)
    # translate any GAMMA error message into a Python exception
    gammaErrorHandler(returncode, out, err)
    if not void:
        return out, err
def slc_corners(parfile):
    """
    extract the corner coordinates of a SAR scene
    
    Parameters
    ----------
    parfile: str
        the GAMMA parameter file to read coordinates from
    
    Returns
    -------
    dict of float
        a dictionary with keys xmin, xmax, ymin, ymax
    """
    # run the GAMMA command and parse its textual report
    out, err = process(['SLC_corners', parfile], void=False)
    coordinates = {}
    number = r'-?[0-9]+\.[0-9]+'
    for record in out.split('\n'):
        values = [float(v) for v in re.findall(number, record)]
        if record.startswith('min. latitude'):
            coordinates['ymin'], coordinates['ymax'] = values
        elif record.startswith('min. longitude'):
            coordinates['xmin'], coordinates['xmax'] = values
    return coordinates
class Spacing(object):
    """
    compute multilooking factors and pixel spacings from an ISPPar object
    for a defined ground range target pixel spacing
    
    Parameters
    ----------
    par: str or ISPPar
        the ISP parameter file
    spacing: int or float
        the target pixel spacing in ground range
    """
    
    def __init__(self, par, spacing='automatic'):
        if not isinstance(par, ISPPar):
            par = ISPPar(par)
        # project the slant range pixel spacing to ground range
        incidence = math.radians(par.incidence_angle)
        self.groundRangePS = par.range_pixel_spacing / math.sin(incidence)
        az_ps = par.azimuth_pixel_spacing
        if spacing == 'automatic':
            # balance the looks so that output pixels become approximately square
            if self.groundRangePS > az_ps:
                self.rlks = 1
                self.azlks = int(round(self.groundRangePS / az_ps))
            else:
                self.rlks = int(round(az_ps / self.groundRangePS))
                self.azlks = 1
        else:
            # derive the looks from the requested target spacing
            self.rlks = int(round(float(spacing) / self.groundRangePS))
            self.azlks = int(round(float(spacing) / az_ps))
class UTM(object):
    """
    convert a gamma parameter file corner coordinate from EQA to UTM
    by feeding it to the interactive GAMMA command ``coord_trans``
    
    Parameters
    ----------
    parfile: str
        the GAMMA parameter file to read the coordinate from
    
    Example
    -------
    >>> from pyroSAR.gamma import UTM
    >>> print(UTM('gamma.par').zone)
    """
    
    def __init__(self, parfile):
        par = ISPPar(parfile)
        # interactive answers expected by coord_trans on stdin
        inlist = ['WGS84', 1, 'EQA', par.corner_lon, par.corner_lat, '', 'WGS84', 1, 'UTM', '']
        inlist = map(str, inlist)
        proc = sp.Popen(['coord_trans'], stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE,
                        universal_newlines=True, shell=False)
        out, err = proc.communicate(''.join([x + '\n' for x in inlist]))
        # keep only non-empty 'key : value' report lines
        out = [x for x in filter(None, out.split('\n')) if ':' in x]
        self.meta = dict()
        for line in out:
            # maxsplit=1 keeps values containing further colons intact
            key, value = re.split(r'\s*:\s*', line, maxsplit=1)
            value = value.split()
            # bugfix: materialize as a list; the original stored the lazy map
            # iterator (a Python-2 leftover), which is exhausted after one pass
            value = list(map(parse_literal, value)) if len(value) > 1 else value[0]
            self.meta[key] = value
        try:
            self.zone, self.northing, self.easting, self.altitude = \
                self.meta['UTM zone/northing/easting/altitude (m)']
        except KeyError:
            # older GAMMA versions report the coordinate without altitude
            self.zone, self.northing, self.easting = \
                self.meta['UTM zone/northing/easting (m)']
================================================
FILE: pyroSAR/gamma/dem.py
================================================
###############################################################################
# preparation of DEM data for use in GAMMA
# Copyright (c) 2014-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
################################################################################
"""
A collection of functions to handle digital elevation models in GAMMA
"""
from urllib.request import urlopen
import os
import re
import shutil
import zipfile as zf
from spatialist import raster, gdal_translate, gdalbuildvrt, gdalwarp, crsConvert
from spatialist.ancillary import finder
from spatialist.envi import HDRobject
from ..auxdata import dem_autoload, dem_create
from ..drivers import ID
from . import ISPPar, UTM, slc_corners, par2hdr
from pyroSAR.examine import ExamineGamma
from pyroSAR.ancillary import hasarg
import logging
log = logging.getLogger(__name__)
try:
from .api import diff, disp, isp
except ImportError:
pass
def fill(dem, dem_out, logpath=None, replace=False):
    """
    interpolate missing values in the SRTM DEM (value -32768)
    
    Parameters
    ----------
    dem: str
        the input DEM to be filled
    dem_out: str
        the name of the filled DEM
    logpath: str
        a directory to write logfiles to
    replace: bool
        delete `dem` once finished?
    
    Returns
    -------
    """
    width = ISPPar(dem + '.par').width
    outdir = os.path.dirname(dem_out)
    tmp1 = dem + '_temp'
    tmp2 = dem + '_temp2'
    # shift true zero heights to 1 so that 0 can serve as the gap marker
    disp.replace_values(f_in=dem, value=0, new_value=1, f_out=tmp1,
                        width=width, rpl_flg=0, dtype=4, logpath=logpath)
    # flag the SRTM nodata value -32768 as 0 for interpolation
    disp.replace_values(f_in=tmp1, value=-32768, new_value=0, f_out=tmp2,
                        width=width, rpl_flg=0, dtype=4, outdir=outdir,
                        logpath=logpath)
    # interpolate the flagged gaps
    isp.interp_ad(data_in=tmp2, data_out=dem_out, width=width, r_max=9,
                  np_min=40, np_max=81, w_mode=2, dtype=4, outdir=outdir,
                  logpath=logpath)
    # remove temporary files
    for tmp in (tmp1, tmp2):
        os.remove(tmp)
    # duplicate the parameter file for the newly created dem
    shutil.copy(dem + '.par', dem_out + '.par')
    # create an ENVI header file
    par2hdr(dem_out + '.par', dem_out + '.hdr')
    if replace:
        for item in [dem + x for x in ['', '.par', '.hdr', '.aux.xml'] if os.path.isfile(dem + x)]:
            os.remove(item)
def transform(infile, outfile, posting=90):
    """
    transform SRTM DEM from EQA to UTM projection
    
    Parameters
    ----------
    infile: str
        the input DEM in EQA projection (a parameter file `infile`.par must exist)
    outfile: str
        the name of the transformed DEM
    posting: int or float
        the target pixel posting in meters
    """
    # read the DEM parameter file and convert the corner coordinate to UTM
    par = ISPPar(infile + '.par')
    utm = UTM(infile + '.par')
    # remove any pre-existing output
    for path in (outfile, outfile + '.par'):
        if os.path.isfile(path):
            os.remove(path)
    # southern-hemisphere scenes require a false northing of 10,000 km
    falsenorthing = 10000000. if par.corner_lat < 0 else 0
    # create a new DEM parameter file with UTM projection details
    answers = ['UTM', 'WGS84', 1, utm.zone, falsenorthing,
               os.path.basename(outfile), '', '', '', '', '',
               '-{0} {0}'.format(posting), '']
    diff.create_dem_par(DEM_par=outfile + '.par', inlist=answers)
    # transform the dem
    diff.dem_trans(DEM1_par=infile + '.par', DEM1=infile,
                   DEM2_par=outfile + '.par', DEM2=outfile, bflg=1)
    par2hdr(outfile + '.par', outfile + '.hdr')
def dem_autocreate(geometry, demType, outfile, buffer=None, t_srs=4326, tr=None, logpath=None,
                   username=None, password=None, geoid_mode='gamma', resampling_method='bilinear'):
    """
    | automatically create a DEM in GAMMA format for a defined spatial geometry.
    | The following steps will be performed:
    
    - collect all tiles overlapping with the geometry using :func:`pyroSAR.auxdata.dem_autoload`
    
      * if they don't yet exist locally they will automatically be downloaded
      * the tiles will be downloaded into the SNAP auxdata directory structure,
        e.g. ``$HOME/.snap/auxdata/dem/SRTM 3Sec``
    
    - create a mosaic GeoTIFF of the same spatial extent as the input geometry
      plus a defined buffer using :func:`pyroSAR.auxdata.dem_create`
    - if necessary, subtract the geoid-ellipsoid difference (see :func:`pyroSAR.auxdata.dem_autoload`
      for height references of different supported DEMs)
    - convert the result to GAMMA format
    
      * If ``t_srs`` is `4326` and the DEM's height reference is either `WGS84` ellipsoid or `EGM96` geoid,
        the command ``srtm2dem`` can be used. This is kept for backwards compatibility.
      * For all other cases the newer command ``dem_import`` can be used if it exists and if the command
        ``create_dem_par`` accepts a parameter `EPSG`.
    
    Parameters
    ----------
    geometry: spatialist.vector.Vector
        a vector geometry delimiting the output DEM size
    demType: str
        the type of DEM to be used; see :func:`~pyroSAR.auxdata.dem_autoload` for options
    outfile: str
        the name of the final DEM file
    buffer: float or None
        a buffer in degrees to create around the geometry
    t_srs: int, str or osgeo.osr.SpatialReference
        A target geographic reference system in WKT, EPSG, PROJ4 or OPENGIS format.
        See function :func:`spatialist.auxil.crsConvert()` for details. Default: 4326.
    tr: tuple or None
        the target resolution as (xres, yres) in units of ``t_srs``; if ``t_srs`` is kept at its default value of 4326,
        ``tr`` does not need to be defined and the original resolution is preserved;
        in all other cases the default of None is rejected
    logpath: str
        a directory to write GAMMA logfiles to
    username: str or None
        (optional) the user name for services requiring registration;
        see :func:`~pyroSAR.auxdata.dem_autoload`
    password: str or None
        (optional) the password for the registration account
    geoid_mode: str
        the software to be used for converting geoid to ellipsoid heights (if necessary); options:
        
        - 'gamma'
        - 'gdal'
    resampling_method: str
        the gdalwarp resampling method; see the gdalwarp documentation for options.
    
    Returns
    -------
    """
    geometry = geometry.clone()
    epsg = crsConvert(t_srs, 'epsg') if t_srs != 4326 else t_srs
    if epsg != 4326:
        # non-4326 output requires newer GAMMA functionality and an explicit resolution
        if not hasarg(diff.create_dem_par, 'EPSG'):
            raise RuntimeError('using a different CRS than 4326 is currently '
                               'not supported for this version of GAMMA')
        if 'dem_import' not in dir(diff):
            raise RuntimeError('using a different CRS than 4326 currently requires command '
                               'dem_import, which is not part of this version of GAMMA')
        if tr is None:
            raise RuntimeError('tr needs to be defined if t_srs is not 4326')
    if os.path.isfile(outfile):
        log.info('outfile already exists')
        return
    tmpdir = outfile + '__tmp'
    os.makedirs(tmpdir)
    # improvement: the original additionally caught RuntimeError only to
    # re-raise it unchanged; a plain try/finally behaves identically
    try:
        if logpath is not None and not os.path.isdir(logpath):
            os.makedirs(logpath)
        vrt = os.path.join(tmpdir, 'dem.vrt')
        dem = os.path.join(tmpdir, 'dem.tif')
        # crop to the exact geometry extent if no reprojection or buffer is needed
        if epsg == geometry.getProjection('epsg') and buffer is None:
            ext = geometry.extent
            bounds = [ext['xmin'], ext['ymin'], ext['xmax'], ext['ymax']]
        else:
            bounds = None
            geometry.reproject(4326)
        log.info('collecting DEM tiles')
        dem_autoload([geometry], demType, vrt=vrt, username=username,
                     password=password, buffer=buffer)
        # TanDEM-X DEM, GETASSE30 DEM: ellipsoidal heights,
        # Copernicus DEM: EGM2008 geoid, all others are EGM96 heights
        # GAMMA works only with ellipsoid heights and the offset needs to be corrected
        # starting from GDAL 2.2 the conversion can be done directly in GDAL; see docs of gdalwarp
        message = 'conversion to GAMMA format'
        geoid = None
        if demType not in ['TDX90m', 'GETASSE30']:
            message = 'geoid correction and conversion to GAMMA format'
            if re.search('Copernicus [139]0m', demType):
                geoid = 'EGM2008'
            elif demType in ['AW3D30', 'SRTM 1Sec HGT', 'SRTM 3Sec']:
                geoid = 'EGM96'
            else:
                raise RuntimeError("'demType' is not supported")
        # decide which software performs the geoid-to-ellipsoid conversion
        if geoid_mode == 'gdal':
            gamma_geoid = None
            gdal_geoid = geoid is not None
        elif geoid_mode == 'gamma':
            gdal_geoid = False
            gamma_geoid = geoid
        else:
            raise RuntimeError("'geoid_mode' is not supported")
        dem_create(vrt, dem, t_srs=epsg, tr=tr, geoid_convert=gdal_geoid,
                   resampleAlg=resampling_method, outputBounds=bounds,
                   geoid=geoid)
        outfile_tmp = os.path.join(tmpdir, os.path.basename(outfile))
        log.info(message)
        dem_import(src=dem, dst=outfile_tmp, geoid=gamma_geoid,
                   logpath=logpath, outdir=tmpdir)
        # move the result out of the temporary directory
        for suffix in ['', '.par', '.hdr']:
            shutil.copyfile(outfile_tmp + suffix, outfile + suffix)
    finally:
        shutil.rmtree(tmpdir)
def dem_import(
        src: str,
        dst: str,
        geoid: str | None = None,
        logpath: str | None = None,
        outdir: str | None = None,
        shellscript: str | None = None
) -> None:
    """
    convert an existing DEM in GDAL-readable format to GAMMA
    format including optional geoid-ellipsoid conversion.
    
    Parameters
    ----------
    src:
        the input DEM
    dst:
        the output DEM
    geoid:
        the geoid height reference of `src`; supported options:
        
        - 'EGM96'
        - 'EGM2008'
        - None: assume WGS84 ellipsoid heights and do not convert heights
    logpath:
        a directory to write logfiles to
    outdir:
        the directory to execute the command in
    shellscript:
        a file to write the GAMMA commands to in shell format
    """
    with raster.Raster(src) as ras:
        epsg = ras.epsg
    # a CRS other than EPSG:4326 requires newer GAMMA functionality
    if epsg != 4326:
        if not hasarg(diff.create_dem_par, 'EPSG'):
            raise RuntimeError('using a different CRS than EPSG:4326 is currently '
                               'not supported for this version of GAMMA')
        if 'dem_import' not in dir(diff):
            raise RuntimeError('using a different CRS than 4326 currently requires command '
                               'dem_import, which is not part of this version of GAMMA')
    dst_base = os.path.splitext(dst)[0]
    if geoid is not None:
        # "Add interpolated geoid offset relative to the WGS84 datum;
        # NODATA are set to the interpolated geoid offset."
        gflg = 2
    else:
        # "No geoid offset correction, replace NODATA with a valid near-zero value."
        gflg = 0
    if epsg == 4326 and geoid == 'EGM96':
        # old approach for backwards compatibility
        # NOTE(review): this branch writes the PAR file to dst + '.par' while the
        # final par2hdr call below reads dst_base + '.par'; the two only agree
        # when dst carries no file extension -- confirm against callers
        diff.srtm2dem(SRTM_DEM=src,
                      DEM=dst,
                      DEM_par=dst + '.par',
                      gflg=gflg,
                      geoid='-',
                      logpath=logpath,
                      outdir=outdir,
                      shellscript=shellscript)
    else:
        # new approach enabling an arbitrary target CRS EPSG code
        diff.create_dem_par(DEM_par=dst_base + '.par',
                            inlist=[''] * 9,
                            EPSG=epsg,
                            logpath=logpath,
                            outdir=outdir,
                            shellscript=shellscript)
        dem_import_pars = {'input_DEM': src,
                           'DEM': dst,
                           'DEM_par': dst_base + '.par',
                           'logpath': logpath,
                           'outdir': outdir,
                           'shellscript': shellscript}
        if gflg == 2:
            # the geoid correction requires the geoid reference files shipped with GAMMA
            home = ExamineGamma().home
            if geoid == 'EGM96':
                geoid_file = os.path.join(home, 'DIFF', 'scripts', 'egm96.dem')
            elif geoid == 'EGM2008':
                geoid_file = os.path.join(home, 'DIFF', 'scripts', 'egm2008-5.dem')
            else:
                raise RuntimeError(f"conversion of '{geoid}' geoid is not supported by GAMMA")
            dem_import_pars['geoid'] = geoid_file
            dem_import_pars['geoid_par'] = geoid_file + '_par'
        diff.dem_import(**dem_import_pars)
    # create an ENVI header file for the converted DEM
    par2hdr(dst_base + '.par', dst_base + '.hdr', nodata=0)
def dempar(dem, logpath=None):
    """
    create GAMMA parameter text files for DEM files
    
    currently only EQA and UTM projections with WGS84 ellipsoid are supported
    
    Parameters
    ----------
    dem: str
        the name of the DEM
    logpath: str
        a directory to write logfiles to
    
    Returns
    -------
    """
    rast = raster.Raster(dem)
    # map the GDAL data type to the GAMMA nomenclature
    dtypes = {'Int16': 'INTEGER*2', 'UInt16': 'INTEGER*2', 'Float32': 'REAL*4'}
    if rast.dtype not in dtypes:
        raise IOError('data type not supported')
    dtype = dtypes[rast.dtype]
    # format pixel posting and top left coordinate
    posting = '{} {}'.format(rast.geo['yres'], rast.geo['xres'])
    latlon = '{} {}'.format(rast.geo['ymax'], rast.geo['xmin'])
    # evaluate the projection
    projections = {'longlat': 'EQA', 'utm': 'UTM'}
    proj4 = rast.proj4args
    if proj4['proj'] not in projections:
        raise ValueError('projection not supported (yet)')
    projection = projections[proj4['proj']]
    # get the ellipsoid
    ellipsoid = proj4['ellps'] if 'ellps' in proj4 else proj4['datum']
    if ellipsoid != 'WGS84':
        raise ValueError('ellipsoid not supported (yet)')
    # assemble the interactive input list for the GAMMA command
    if projection == 'UTM':
        # southern-hemisphere DEMs require a false northing of 10,000 km
        falsenorthing = 10000000. if rast.geo['ymin'] < 0 else 0
        parlist = [projection, ellipsoid, 1, proj4['zone'], falsenorthing,
                   os.path.basename(dem), dtype, 0, 1,
                   rast.cols, rast.rows, posting, latlon]
    else:
        parlist = [projection, ellipsoid, 1, os.path.basename(dem), dtype,
                   0, 1, rast.cols, rast.rows, posting, latlon]
    # execute the GAMMA command
    diff.create_dem_par(DEM_par=os.path.splitext(dem)[0] + '.par',
                        inlist=parlist,
                        outdir=os.path.dirname(dem),
                        logpath=logpath)
def swap(data, outname):
    """
    byte swapping from small to big endian (as required by GAMMA)
    
    Parameters
    ----------
    data: str
        the DEM file to be swapped
    outname: str
        the name of the file to write
    
    Returns
    -------
    """
    # read data type and format, closing the GDAL dataset right away
    with raster.Raster(data) as ras:
        dtype = ras.dtype
        ras_format = ras.format
    if ras_format != 'ENVI':
        raise IOError('only ENVI format supported')
    # bytes per sample for each supported data type
    dtype_lookup = {'Int16': 2, 'CInt16': 2, 'Int32': 4, 'Float32': 4, 'CFloat32': 4, 'Float64': 8}
    if dtype not in dtype_lookup:
        raise IOError('data type {} not supported'.format(dtype))
    disp.swap_bytes(infile=data,
                    outfile=outname,
                    swap_type=dtype_lookup[dtype])
    # write a matching header declaring big endian byte order
    with HDRobject(data + '.hdr') as header:
        header.byte_order = 1
        header.write(outname + '.hdr')
def mosaic(demlist, outname, byteorder=1, gammapar=True):
    """
    mosaicing of multiple DEMs
    
    Parameters
    ----------
    demlist: list[str]
        a list of DEM names to be mosaiced
    outname: str
        the name of the final mosaic file
    byteorder: {0, 1}
        the byte order of the mosaic
        
        - 0: small endian
        - 1: big endian
    gammapar: bool
        create a GAMMA parameter file for the mosaic?
    
    Returns
    -------
    """
    if len(demlist) < 2:
        raise IOError('length of demlist < 2')
    # read the nodata value from the first input and apply it to source and destination
    with raster.Raster(demlist[0]) as ras:
        nodata = ras.nodata
    # bugfix: the destination key previously contained a leading space
    # (' dstNodata'), so the destination nodata value was never applied
    par = {'format': 'ENVI',
           'srcNodata': nodata, 'dstNodata': nodata,
           'options': ['-q']}
    gdalwarp(src=demlist, dst=outname, **par)
    if byteorder == 1:
        # GAMMA requires big endian; swap and replace the mosaic in place
        swap(outname, outname + '_swap')
        for item in [outname, outname + '.hdr', outname + '.aux.xml']:
            # the .aux.xml file is not guaranteed to exist
            if os.path.isfile(item):
                os.remove(item)
        os.rename(outname + '_swap', outname)
        os.rename(outname + '_swap.hdr', outname + '.hdr')
    if gammapar:
        dempar(outname)
def hgt(parfiles):
    """
    concatenate hgt file names overlapping with multiple SAR scenes
    
    - the input is read for corner coordinates of which the next integer
      lower left latitude and longitude is computed
    - hgt files are supplied in 1 degree equiangular format named e.g.
      N16W094.hgt (with pattern [NS][0-9]{2}[EW][0-9]{3}.hgt)
    - For north and east hemisphere the respective absolute latitude and longitude
      values are smaller than the lower left coordinate of the SAR image
    - west and south coordinates are negative and hence the nearest lower left
      integer absolute value is going to be larger
    
    Parameters
    ----------
    parfiles: list of str or pyroSAR.ID
        a list of GAMMA parameter files or pyroSAR ID objects
    
    Returns
    -------
    list
        the names of hgt files overlapping with the supplied parameter files/objects
    """
    latitudes = []
    longitudes = []
    for parfile in parfiles:
        if isinstance(parfile, ID):
            corners = parfile.getCorners()
        elif parfile.endswith('.par'):
            corners = slc_corners(parfile)
        else:
            raise RuntimeError('parfiles items must be of type pyroSAR.ID or GAMMA parfiles with suffix .par')
        # floor each corner coordinate to the next lower integer degree
        latitudes.extend(int(float(corners[k]) // 1) for k in ('ymin', 'ymax'))
        longitudes.extend(int(float(corners[k]) // 1) for k in ('xmin', 'xmax'))
    # fill the gaps between the collected integer coordinates
    latitudes = range(min(latitudes), max(latitudes) + 1)
    longitudes = range(min(longitudes), max(longitudes) + 1)
    # format to hemisphere letter plus zero-padded absolute degree value
    lat_tags = ['{}{:02d}'.format('S' if y < 0 else 'N', abs(y)) for y in latitudes]
    lon_tags = ['{}{:03d}'.format('W' if x < 0 else 'E', abs(x)) for x in longitudes]
    # concatenate all formatted latitudes and longitudes with each other as final product
    return [a + b + '.hgt' for a in lat_tags for b in lon_tags]
def makeSRTM(scenes, srtmdir, outname):
    """
    Create a DEM in GAMMA format from SRTM tiles
    
    - coordinates are read to determine the required DEM extent and select the necessary hgt tiles
    - mosaics SRTM DEM tiles, converts them to GAMMA format and subtracts offset to WGS84 ellipsoid
    
    intended for SRTM products downloaded from:
    
    - USGS: https://gdex.cr.usgs.gov/gdex/
    - CGIAR: https://srtm.csi.cgiar.org
    
    Parameters
    ----------
    scenes: list of str or pyroSAR.ID
        a list of Gamma parameter files or pyroSAR ID objects to read the DEM extent from
    srtmdir: str
        a directory containing the SRTM hgt tiles
    outname: str
        the name of the final DEM file
    
    Returns
    -------
    """
    tempdir = outname + '___temp'
    os.makedirs(tempdir)
    hgt_options = hgt(scenes)
    hgt_files = finder(srtmdir, hgt_options)
    # read the nodata value of each tile; use context managers so that the
    # GDAL datasets are closed (the original left the handles open)
    nodatas = set()
    for hgt_file in hgt_files:
        with raster.Raster(hgt_file) as ras:
            nodatas.add(ras.nodata)
    if len(nodatas) == 1:
        nodata = nodatas.pop()
    else:
        raise RuntimeError('different nodata values are not permitted')
    srtm_vrt = os.path.join(tempdir, 'srtm.vrt')
    srtm_temp = srtm_vrt.replace('.vrt', '_tmp')
    srtm_final = srtm_vrt.replace('.vrt', '')
    # mosaic the tiles and convert to ENVI format
    gdalbuildvrt(src=hgt_files, dst=srtm_vrt, srcNodata=nodata, options=['-overwrite'])
    gdal_translate(src=srtm_vrt, dst=srtm_temp, format='ENVI', noData=nodata)
    # convert to GAMMA format and subtract the geoid offset (gflg=2)
    diff.srtm2dem(SRTM_DEM=srtm_temp,
                  DEM=srtm_final,
                  DEM_par=srtm_final + '.par',
                  gflg=2,
                  geoid='-',
                  outdir=tempdir)
    shutil.move(srtm_final, outname)
    shutil.move(srtm_final + '.par', outname + '.par')
    par2hdr(outname + '.par', outname + '.hdr')
    shutil.rmtree(tempdir)
def hgt_collect(parfiles, outdir, demdir=None, arcsec=3):
    """
    automatic downloading and unpacking of srtm tiles
    
    Parameters
    ----------
    parfiles: list of str or pyroSAR.ID
        a list of Gamma parameter files or pyroSAR ID objects
    outdir: str
        a target directory to download the tiles to
    demdir: str or None
        an additional directory already containing hgt tiles
    arcsec: {1, 3}
        the spatial resolution to be used
    
    Returns
    -------
    list
        the names of all local hgt tiles overlapping with the parfiles
    """
    # concatenate required hgt tile names
    target_ids = hgt(parfiles)
    targets = []
    pattern = '[NS][0-9]{2}[EW][0-9]{3}'
    # if an additional dem directory has been defined, check this directory for required hgt tiles
    if demdir is not None:
        targets.extend(finder(demdir, target_ids))
    # check for additional potentially existing hgt tiles in the defined output directory
    # (improvement: compare basenames explicitly instead of using the tile
    # name as a regular expression against the joined path list)
    found = {os.path.basename(x) for x in targets}
    extras = [os.path.join(outdir, x) for x in target_ids
              if os.path.isfile(os.path.join(outdir, x)) and x not in found]
    targets.extend(extras)
    log.info('found {} relevant SRTM tiles...'.format(len(targets)))
    # search server for all required tiles, which were not found in the local directories
    if len(targets) < len(target_ids):
        log.info('searching for additional SRTM tiles on the server...')
        onlines = []
        if arcsec == 1:
            remotes = ['http://e4ftl01.cr.usgs.gov/SRTM/SRTMGL1.003/2000.02.11/']
            remotepattern = pattern + '.SRTMGL1.hgt.zip'
        elif arcsec == 3:
            server = 'https://dds.cr.usgs.gov/srtm/version2_1/SRTM3/'
            remotes = [os.path.join(server, x) for x in
                       ['Africa', 'Australia', 'Eurasia', 'Islands', 'North_America', 'South_America']]
            remotepattern = pattern + '[.]hgt.zip'
        else:
            raise ValueError('argument arcsec must be of value 1 or 3')
        local_names = [os.path.basename(x) for x in targets]
        for remote in remotes:
            # bugfix: decode the payload to str; re.findall with a str pattern
            # on bytes raises TypeError on Python 3
            response = urlopen(remote).read().decode('utf-8', errors='ignore')
            items = sorted(set(re.findall(remotepattern, response)))
            for item in items:
                outname = re.findall(pattern, item)[0] + '.hgt'
                if outname in target_ids and outname not in local_names:
                    onlines.append(os.path.join(remote, item))
        # if additional tiles have been found online, download and unzip them to the local directory
        if len(onlines) > 0:
            log.info('downloading {} SRTM tiles...'.format(len(onlines)))
            for candidate in onlines:
                localname = os.path.join(outdir, re.findall(pattern, candidate)[0] + '.hgt')
                with urlopen(candidate) as infile, open(localname + '.zip', 'wb') as outfile:
                    outfile.write(infile.read())
                with zf.ZipFile(localname + '.zip', 'r') as z:
                    z.extractall(outdir)
                os.remove(localname + '.zip')
                targets.append(localname)
    return targets
================================================
FILE: pyroSAR/gamma/error.py
================================================
###############################################################################
# interface for translating GAMMA errors messages into Python error types
# Copyright (c) 2015-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
import re
import signal
def gammaErrorHandler(returncode: int, out: str, err: str) -> None:
    """
    Translate GAMMA error messages into Python exceptions. This function is
    not intended for direct use but as part of function
    :func:`pyroSAR.gamma.auxil.process`.
    
    Parameters
    ----------
    returncode:
        the subprocess return code
    out:
        the stdout message returned by a subprocess call of a gamma command
    err:
        the stderr message returned by a subprocess call of a gamma command
    
    Raises
    ------
    IOError | ValueError | RuntimeError | OSError | GammaUnknownError
    """
    # collect all stdout/stderr lines starting with 'ERROR'
    lines = out.split('\n') if out else []
    lines.extend(err.strip().split('\n'))
    errors = [line for line in lines if line.startswith('ERROR')]
    # registry of known gamma error messages and corresponding Python error types
    # do not change the Python error types of specific messages! This will change the behavior of several functions
    # in case no error is to be thrown define None as error type
    # NOTE(review): some patterns contain unescaped regex metacharacters
    # (e.g. 'p_interp()'); confirm whether they match the literal messages
    knownErrors = {'image data formats differ': IOError,
                   'cannot open': IOError,
                   r'no coverage of SAR image by DEM(?: \(in (?:latitude/northing|longitude/easting)\)|)': RuntimeError,
                   'libgdal.so.1: no version information available': None,
                   'line outside of image': ValueError,
                   'no offsets found above SNR threshold': ValueError,
                   'window size < 4': ValueError,
                   'MLI oversampling factor must be 1, 2, 4, 8': ValueError,
                   'no points available for determining average intensity': ValueError,
                   'p_interp(): time outside of range': RuntimeError,
                   'no overlap with lookup table': RuntimeError,
                   'insufficient offset points to determine offset model parameters': RuntimeError,
                   'insufficient offset points left after culling to determine offset model parameters': RuntimeError,
                   'calloc_1d: number of elements <= 0': ValueError,
                   'multi-look output line:': RuntimeError,
                   'no OPOD state vector found with the required start time!': RuntimeError,
                   'gc_map operates only with slant range geometry, image geometry in SLC_par: GROUND_RANGE': RuntimeError,
                   'OPOD state vector data ends before start of the state vector time window': RuntimeError,
                   'non-zero exit status': RuntimeError,
                   'unsupported DEM projection': RuntimeError,
                   'tiffWriteProc:No space left on device': RuntimeError,
                   'in subroutine julday: there is no year zero!': RuntimeError,
                   'cannot create ISP image parameter file': OSError}
    if returncode == 0:
        return
    if errors:
        # the last error message decides which registered pattern applies
        last = errors[-1]
        details = '\n\n'.join([re.sub('ERROR[: ]*', '', x) for x in errors])
        for known, errortype in knownErrors.items():
            if re.search(known, last):
                if errortype is None:
                    # the message is known to be harmless
                    return
                raise errortype(details)
    else:
        details = f'{err}\nfailed with return code {returncode}'
    if returncode < 0:
        # handle signal kills like SIGSEGV (segmentation fault)
        sig = signal.Signals(-returncode)
        raise RuntimeError(details + f' ({sig.name})')
    raise GammaUnknownError(details)
class GammaUnknownError(Exception):
    """
    Generic fallback error raised when a GAMMA error message is not (yet) part of
    the known-error mapping used by :func:`gammaErrorHandler`.
    Whenever this error occurs, the offending message should be added to that mapping
    so a more specific exception type can be raised instead.
    """
    
    def __init__(self, errormessage):
        super().__init__(errormessage)
================================================
FILE: pyroSAR/gamma/parser.py
================================================
###############################################################################
# parse GAMMA command docstrings to Python functions
# Copyright (c) 2015-2025, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
import os
import re
from shutil import which
import subprocess as sp
from collections import Counter
from spatialist.ancillary import finder, dissolve
from pyroSAR.examine import ExamineGamma
import logging
log = logging.getLogger(__name__)
def parse_command(command, indent='    '):
    """
    Parse the help text of a GAMMA command to a Python function including a docstring.
    The docstring is in rst format and can thus be parsed by e.g. sphinx.

    This function is not intended to be used by itself, but rather within function :func:`parse_module`.

    Parameters
    ----------
    command: str
        the name of the gamma command
    indent: str
        the Python function indentation string; default: four spaces

    Returns
    -------
    str
        the full Python function text

    Raises
    ------
    OSError
        if the command does not exist on the system
    RuntimeError
        if the command's help text cannot be parsed consistently
    DeprecationWarning
        if the command reports itself as renamed or replaced
    """
    # run the command without passing arguments to just catch its usage description
    command = which(command)
    if command is None:
        raise OSError('command does not exist')
    command_base = os.path.basename(command)
    
    proc = sp.Popen(command, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE, universal_newlines=True)
    out, err = proc.communicate()
    
    # sometimes the description string is split between stdout and stderr
    # for the following commands stderr contains the usage description line, which is inserted into stdout
    if command_base in ['ras_pt', 'ras_data_pt', 'rasdt_cmap_pt']:
        out = out.replace(' ***\n ', ' ***\n ' + err)
    else:
        # for all other commands stderr is just appended to stdout
        out += err
    
    # raise a warning when the command has been deprecated
    # extract all lines starting and ending with three asterisks
    matches = re.findall(r'^\*{3}\s*(.*?)\s*\*{3}$', out, re.MULTILINE)
    if matches:
        # join the lines and search for a deprecation message
        cleaned = ' '.join(matches)
        pattern = (r'([\w\.]+ (?:has been|was) re(?:named to|placed(?: that [ \*\n]*|) by)'
                   r'(?:[ \*\n]*|)(?: the ISP program|) [\w\.]+)')
        match = re.search(pattern, cleaned)
        if match:
            raise DeprecationWarning(match.group())
    
    if re.search(r"Can't locate FILE/Path\.pm in @INC", out):
        raise RuntimeError('unable to parse Perl script')
    
    ###########################################
    # fix command-specific inconsistencies in parameter naming
    # in several commands the parameter naming in the usage description line does not match that of the docstring
    # each entry maps a command name to a list of (old, new) replacement pairs applied to the raw help text;
    # ('', '') pairs are placeholders with no effect
    parnames_lookup = {'2PASS_INT': [('OFF_PAR', 'OFF_par')],
                       'adapt_filt': [('low_snr_thr', 'low_SNR_thr')],
                       'atm_mod2': [('rpt', 'report'),
                                    ('[mode]', '[model_atm]'),
                                    ('[model]', '[model_atm]'),
                                    ('model atm', 'model_atm atm'),
                                    ],
                       'atm_mod_2d': [('xref', 'rref'),
                                      ('yref', 'azref')],
                       'atm_mod_2d_pt': [('[sigma_min]', '[sigma_max]')],
                       'base_calc': [('plt_flg', 'plt_flag'),
                                     ('pltflg', 'plt_flag')],
                       'base_init': [('', '')],
                       'base_plot': [('plt_flg', 'plt_flag'),
                                     ('pltflg', 'plt_flag')],
                       'cc_monitoring': [('...', '<...>')],
                       'cct_sp_pt': [('pcct_sp_pt', 'pcct_sp')],
                       'comb_interfs': [('combi_out', 'combi_int')],
                       'coord_to_sarpix': [('north/lat', 'north_lat'),
                                           ('east/lon', 'east_lon'),
                                           ('SLC_par', ''),
                                           ('SLC/MLI_par', 'SLC_MLI_par')],
                       'data2geotiff': [('nodata', 'no_data')],
                       'def_mod': [('', ''),
                                   ('def (output)', 'def_rate (output)')],
                       'dis2hgt': [('m/cycle', 'm_cycle')],
                       'discc': [('min_corr', 'cmin'),
                                 ('max_corr', 'cmax')],
                       'disp2ras': [('', '')],
                       'dis_data': [('...', '<...>')],
                       'dispwr': [('data_type', 'dtype')],
                       'DORIS_vec': [('SLC_PAR', 'SLC_par')],
                       'gc_map_fd': [('fdtab', 'fd_tab')],
                       'gc_map_grd': [('', '')],
                       'geocode_back': [('', ''),
                                        ('\n gc_map ', '\n lookup_table ')],
                       'GRD_to_SR': [('SLC_par', 'MLI_par')],
                       'haalpha': [('', ''),
                                   ('alpha (output)', 'alpha2 (output)')],
                       'histogram_ras': [('mean/stdev', 'mean_stdev')],
                       'hsi_color_scale': [('[chip]', '[chip_width]')],
                       'HUYNEN_DEC': [('T11_0', 'T11'),
                                      ('', ''),
                                      ('HUYNEN_DEC:', '***')],
                       'interf_SLC': [(' SLC2_pa ', ' SLC2_par ')],
                       'ionosphere_mitigation': [('', '')],
                       'landsat2dem': [('', '')],
                       'line_interp': [('input file', 'data_in'),
                                       ('output file', 'data_out')],
                       # placeholders; a single-string entry like (' ') would crash
                       # str.replace on argument unpacking below
                       'm-alpha': [('', '')],
                       'm-chi': [('', '')],
                       'm-delta': [('', '')],
                       'map_section': [('n1', 'north1'),
                                       ('e1', 'east1'),
                                       ('n2', 'north2'),
                                       ('e2', 'east2'),
                                       ('[coord]', '[coords]')],
                       'mask_class': [('...', '<...>')],
                       'mcf_pt': [('', '[azlks]'),
                                  ('', '[rlks]')],
                       'mk_2d_im_geo': [('exponent', 'exp')],
                       'mk_adf2_2d': [('[alpha_max [', '[alpha_max] ['),
                                      ('-m MLI_dir', 'mli_dir'),
                                      ('-s scale', 'scale'),
                                      ('-e exp', 'exponent'),
                                      ('-u', 'update'),
                                      ('-D', 'dem_par')],
                       'mk_base_calc': [('', '')],
                       'mk_cpd_all': [('dtab', 'data_tab')],
                       'mk_cpx_ref_2d': [('diff_tab', 'cpx_tab')],
                       'mk_diff_tc_2d': [('', ''),
                                         ('def (input)', 'def_rate (input)')],
                       'mk_dispmap2_2d': [('RMLI_image', 'MLI'),
                                          ('RMLI_par', 'MLI_par'),
                                          ('MLI_image', 'MLI'),
                                          ('DISP_tab', 'disp_tab')],
                       'mk_dispmap_2d': [('RMLI_image', 'MLI'),
                                         ('RMLI_par', 'MLI_par'),
                                         ('MLI_image', 'MLI'),
                                         ('DISP_tab', 'disp_tab')],
                       'mk_geo_data_all': [('data_geo_dir', 'geo_dir')],
                       'mk_itab': [('', '')],
                       'mk_hgt_2d': [('m/cycle', 'm_cycle')],
                       'mk_pol2rec_2d': [('data_tab', 'DIFF_tab'),
                                         ('', ''),
                                         ('', ''),
                                         ('type input', 'dtype input'),
                                         ('\n Options:\n', ''),
                                         ('-s scale', 'scale'),
                                         ('-e exp', 'exponent'),
                                         ('-a min', 'min'),
                                         ('-b max', 'max'),
                                         ('-R rmax', 'rmax'),
                                         ('-m mode', 'mode'),
                                         ('-u', 'update')],
                       'mk_rasdt_all': [('RMLI_image', 'MLI'),
                                        ('MLI_image', 'MLI')],
                       'mk_rasmph_all': [('RMLI_image', 'MLI'),
                                         ('MLI_image', 'MLI')],
                       'mk_tab2': [('--linenumber', 'linenumber')],
                       'mk_unw_2d': [('unw_mask1', 'unw_mask')],
                       'mk_unw_ref_2d': [('diff_tab', 'DIFF_tab')],
                       'MLI2pt': [('MLI_TAB', 'MLI_tab'),
                                  ('pSLC_par', 'pMLI_par')],
                       'mosaic': [('<..>', '<...>'),
                                  ('DEM_parout', 'DEM_par_out')],
                       'multi_class_mapping': [('...', '<...>')],
                       'multi_def': [('', ''),
                                     ('def (output)', 'def_rate (output)')],
                       'multi_look_geo': [('geo_SLC', 'SLC'),
                                          ('SLC/MLI', 'SLC_MLI')],
                       'multi_look_MLI': [('MLI in_par', 'MLI_in_par')],
                       'offset_fit': [('interact_flag', 'interact_mode')],
                       'offset_plot_az': [('rmin', 'r_min'),
                                          ('rmax', 'r_max')],
                       'par_ASF_SLC': [('CEOS_SAR_leader', 'CEOS_leader')],
                       'par_ASAR': [('ASAR/ERS_file', 'ASAR_ERS_file')],
                       'par_EORC_JERS_SLC': [('slc', 'SLC')],
                       'par_ERSDAC_PALSAR': [('VEXCEL_SLC_par', 'ERSDAC_SLC_par')],
                       'par_ESA_JERS_SEASAT_SLC': [('[slc]', '[SLC]')],
                       'par_ICEYE_GRD': [('', ''),
                                         ('[mli]', '[MLI]')],
                       'par_ICEYE_SLC': [('[slc]', '[SLC]')],
                       'par_MSP': [('SLC/MLI_par', 'SLC_MLI_par')],
                       'par_SIRC': [('UTC/MET', 'UTC_MET')],
                       'par_TX_GRD': [('COSAR', 'GeoTIFF')],
                       'par_UAVSAR_SLC': [('SLC/MLC_in', 'SLC_MLC_in'),
                                          ('SLC/MLI_par', 'SLC_MLI_par'),
                                          ('SLC/MLI_out', 'SLC_MLI_out')],
                       'par_UAVSAR_geo': [('SLC/MLI_par', 'SLC_MLI_par')],
                       'phase_sim': [('sim (', 'sim_unw (')],
                       'product': [('wgt_flg', 'wgt_flag')],
                       'radcal_MLI': [('MLI_PAR', 'MLI_par')],
                       'radcal_PRI': [('GRD_PAR', 'GRD_par'),
                                      ('PRI_PAR', 'PRI_par')],
                       'radcal_SLC': [('SLC_PAR', 'SLC_par')],
                       'ras2jpg': [('{', '{{'),
                                   ('}', '}}')],
                       'ras_data_pt': [('pdata1', 'pdata')],
                       'ras_to_rgb': [('red channel', 'red_channel'),
                                      ('green channel', 'green_channel'),
                                      ('blue channel', 'blue_channel')],
                       'rascc_mask_thinning': [('...', '[...]')],
                       'rashgt': [('m/cycle', 'm_cycle')],
                       'rashgt_shd': [('m/cycle', 'm_cycle'),
                                      ('\n cycle ', '\n m_cycle ')],
                       'rasdt_cmap_pt': [('pdata1', 'pdata')],
                       'raspwr': [('hdrz', 'hdrsz')],
                       'ras_ras': [('r_lin/log', 'r_lin_log'),
                                   ('g_lin/log', 'g_lin_log'),
                                   ('b_lin/log', 'b_lin_log')],
                       'ras_ratio_dB': [('[min_cc] [max_cc] [scale] [exp]', '[min_value] [max_value] [dB_offset]')],
                       'rasSLC': [('[header]', '[hdrsz]')],
                       'ratio': [('wgt_flg', 'wgt_flag')],
                       'restore_float': [('input file', 'data_in'),
                                         ('output file', 'data_out'),
                                         ('interpolation_limit', 'interp_limit')],
                       'S1_coreg_TOPS_no_refinement': [('RLK', 'rlks'),
                                                       ('AZLK', 'azlks')],
                       'S1_OPOD_vec': [('SLC_PAR', 'SLC_par')],
                       'single_class_mapping': [('>...', '> <...>')],
                       'ScanSAR_burst_cc_ad': [('bx', 'box_min'),
                                               ('by', 'box_max')],
                       'ScanSAR_burst_to_mosaic': [('DATA_tab_ref', 'data_tab_ref'),
                                                   ('[mflg] [dtype]', '[mflg]')],
                       'ScanSAR_full_aperture_SLC': [('SLCR_dir', 'SLC2_dir')],
                       'scale_base': [('SLC-1_par-2', 'SLC1_par-2')],
                       'sigma2gamma': [('', ''),
                                       ('gamma (output)', 'gamma0 (output)'),
                                       ('pwr1', 'sigma0')],
                       'SLC_interp_lt': [('SLC-2', 'SLC2'),
                                         ('blksz', 'blk_size')],
                       'SLC_intf': [('SLC1s_par', 'SLC-1s_par'),
                                    ('SLC2Rs_par', 'SLC-2Rs_par')],
                       'SLC_intf_geo2': [('cc (', 'CC (')],
                       'SLC_interp_map': [('coffs2_sm', 'coffs_sm')],
                       'SLC_mosaic_S1_TOPS': [('wflg', 'bflg')],
                       'srtm_mosaic': [('', '')],
                       'SSI_INT_S1': [('', '')],
                       'texture': [('weights_flag', 'wgt_flag')],
                       'ts_rate': [('sim_flg', 'sim_flag')],
                       'TX_SLC_preproc': [('TX_list', 'TSX_list')],
                       'uchar2float': [('infile', 'data_in'),
                                       ('outfile', 'data_out')],
                       'validate': [('ras1', 'ras_map'),
                                    ('rasf_map', 'ras_map'),
                                    ('ras2', 'ras_inv'),
                                    ('rasf_inventory', 'ras_inv'),
                                    ('class1[1]', 'class1_1'),
                                    ('class1[2]', 'class1_2'),
                                    ('class1[n]', 'class1_n'),
                                    ('class2[1]', 'class2_1'),
                                    ('class2[2]', 'class2_2'),
                                    ('class2[n]', 'class2_n')]}
    if command_base in parnames_lookup.keys():
        for replacement in parnames_lookup[command_base]:
            out = out.replace(*replacement)
    ###########################################
    # filter header (general command description) and usage description string
    header = '\n'.join([x.strip('* ') for x in re.findall('[*]{3}.*(?:[*]{3}|)', out)])
    header = '| ' + header.replace('\n', '\n| ')
    usage = re.search('usage:.*(?=\n)', out).group()
    
    # filter required and optional arguments from usage description text
    # required arguments are enclosed in <>, optional arguments in []
    arg_req_raw = [re.sub(r'[^\w.-]*', '', x) for x in re.findall('[^<]*<([^>]*)>', usage)]
    arg_opt_raw = [re.sub(r'[^\w.-]*', '', x) for x in re.findall(r'[^[]*\[([^]]*)]', usage)]
    ###########################################
    # add parameters missing in the usage argument lists
    appends = {'mk_adf2_2d': ['cc_min', 'cc_max', 'mli_dir', 'scale', 'exponent', 'update', 'dem_par'],
               'mk_pol2rec_2d': ['scale', 'exponent', 'min', 'max', 'rmax', 'mode', 'update'],
               'SLC_interp_S1_TOPS': ['mode', 'order'],
               'SLC_interp_map': ['mode', 'order']}
    if command_base in appends.keys():
        for var in appends[command_base]:
            if var not in arg_opt_raw:
                arg_opt_raw.append(var)
    ###########################################
    # define parameter replacements; this is intended for parameters which are to be aggregated into a list parameter
    # each entry: (old parameter names, new list parameter name(s), docstring description(s) for the new parameter(s))
    replacements = {'cc_monitoring': [(['nfiles', 'f1', 'f2', '...'],
                                       ['files'],
                                       ['a list of input data files (float)'])],
                    'dis_data': [(['nstack', 'pdata1', '...'],
                                  ['pdata'],
                                  ['a list of point data stack files'])],
                    'lin_comb': [(['nfiles', 'f1', 'f2', '...'],
                                  ['files'],
                                  ['a list of input data files (float)']),
                                 (['factor1', 'factor2', '...'],
                                  ['factors'],
                                  ['a list of factors to multiply the input files with'])],
                    'lin_comb_cpx': [(['nfiles', 'f1', 'f2', '...'],
                                      ['files'],
                                      ['a list of input data files (float)']),
                                     (['factor1_r', 'factor2_r', '...'],
                                      ['factors_r'],
                                      ['a list of real part factors to multiply the input files with']),
                                     (['factor1_i', 'factor2_i'],
                                      ['factors_i'],
                                      ['a list of imaginary part factors to multiply the input files with'])],
                    'mask_class': [(['n_class', 'class_1', '...', 'class_n'],
                                    ['class_values'],
                                    ['a list of class map values'])],
                    'mosaic': [(['nfiles', 'data_in1', 'DEM_par1', 'data_in2', 'DEM_par2', '...', '...'],
                                ['data_in_list', 'DEM_par_list'],
                                ['a list of input data files',
                                 'a list of DEM/MAP parameter files for each data file'])],
                    'multi_class_mapping': [(['nfiles', 'f1', 'f2', '...', 'fn'],
                                             ['files'],
                                             ['a list of input data files (float)'])],
                    'rascc_mask_thinning': [(['thresh_1', '...', 'thresh_nmax'],
                                             ['thresholds'],
                                             ['a list of thresholds sorted from smallest to '
                                              'largest scale sampling reduction'])],
                    'single_class_mapping': [(['nfiles', 'f1', '...', 'fn'],
                                              ['files'],
                                              ['a list of point data stack files']),
                                             (['lt1', 'ltn'],
                                              ['thres_lower'],
                                              ['a list of lower thresholds for the files']),
                                             (['ut1', 'utn'],
                                              ['thres_upper'],
                                              ['a list of upper thresholds for the files'])],
                    'validate': [(['nclass1', 'class1_1', 'class1_2', '...', 'class1_n'],
                                  ['classes_map'],
                                  ['a list of class values for the map data file (max. 16), 0 for all']),
                                 (['nclass2', 'class2_1', 'class2_2', '...', 'class2_n'],
                                  ['classes_inv'],
                                  ['a list of class values for the inventory data file (max. 16), 0 for all'])]}
    if '..' in usage and command_base not in replacements.keys():
        raise RuntimeError('the command contains multi-args which were not properly parsed')
    
    def replace(inlist, replacement):
        # aggregate individual multi-arg parameters into single list parameters as defined in 'replacements'
        outlist = list(inlist)
        for old, new, description in replacement:
            if old[0] not in outlist:
                return outlist
            outlist[outlist.index(old[0])] = new
            for i in range(1, len(old)):
                if old[i] in outlist:
                    outlist.remove(old[i])
        return dissolve(outlist)
    
    arg_req = list(arg_req_raw)
    arg_opt = list(arg_opt_raw)
    if command_base in replacements.keys():
        arg_req = replace(arg_req, replacements[command_base])
        arg_opt = replace(arg_opt, replacements[command_base])
    ###########################################
    # check if there are any double parameters
    double = [k for k, v in Counter(arg_req + arg_opt).items() if v > 1]
    if len(double) > 0:
        raise RuntimeError('double parameter{0}: {1}'.format('s' if len(double) > 1 else '', ', '.join(double)))
    ###########################################
    # add a parameter inlist for commands which take interactive input via stdin
    # the list of commands, which are interactive is hard to assess and thus likely a source of future errors
    inlist = ['create_dem_par', 'par_ESA_ERS']
    if command_base in inlist:
        arg_req.append('inlist')
    ######################################################################################
    # create the function argument string for the Python function
    # optional arguments are parametrized with '-' as default value, e.g., arg_opt='-'
    argstr_function = ', '.join(arg_req + [x + "='-'" for x in arg_opt])
    # a '-' in the parameter name is replaced with '_'
    argstr_function = re.sub(r'([^\'])-([^\'])', r'\1_\2', argstr_function)
    # replace unsupported 'def' parameter name with 'drm'
    argstr_function = argstr_function.replace(', def=', ', drm=')
    
    # some commands have different defaults than '-'
    replacements_defaults = {'S1_import_SLC_from_zipfiles': {'OPOD_dir': '.'}}
    if command_base in replacements_defaults.keys():
        for key, value in replacements_defaults[command_base].items():
            old = f"{key}='-'"
            if isinstance(value, str):
                new = f"{key}='{value}'"
            else:
                new = f"{key}={value}"
            argstr_function = argstr_function.replace(old, new)
    
    # create the function definition string
    fun_def = 'def {name}({args_fun}, logpath=None, outdir=None, shellscript=None):' \
        .format(name=command_base.replace('-', '_'),
                args_fun=argstr_function)
    # a function name must not start with a digit
    if command_base == '2PASS_INT':
        fun_def = fun_def.replace(command_base, 'TWO_PASS_INT')
    ######################################################################################
    # special handling of flag args
    # entries: (parameter name, command line flag, Python default value)
    flag_args = {'mk_adf2_2d': [('mli_dir', '-m', None),
                                ('scale', '-s', None),
                                ('exponent', '-e', None),
                                ('update', '-u', False),
                                ('dem_par', '-D', None)],
                 'mk_pol2rec_2d': [('scale', '-s', None),
                                   ('exp', '-e', None),
                                   ('min', '-a', None),
                                   ('max', '-b', None),
                                   ('rmax', '-R', None),
                                   ('mode', '-m', None),
                                   ('update', '-u', False)],
                 'mk_tab2': [('linenumber', '--linenumber', False)]}
    # replace arg default like arg='-' with arg=None or arg=False
    if command_base in flag_args:
        for arg in flag_args[command_base]:
            fun_def = re.sub(f'{arg[0]}=\'-\'', f'{arg[0]}={arg[2]}', fun_def)
    ######################################################################################
    # create the process call argument string
    # a '-' in the parameter name is replaced with '_'
    # e.g. 'arg1, arg2, arg3'
    # if a parameter is named 'def' (not allowed in Python) it is renamed to 'drm'
    # inlist is not a proc arg but a parameter passed to function process
    proc_args = arg_req + arg_opt
    if command_base in inlist:
        proc_args.remove('inlist')
    proc_args_tmp = list(proc_args)
    
    # insert the length of a list argument as a proc arg
    if command_base in replacements.keys() and command_base != 'rascc_mask_thinning':
        key = replacements[command_base][0][1]
        if isinstance(key, list):
            key = key[0]
        proc_args_tmp.insert(proc_args_tmp.index(key), f'len({key})')
        if command_base == 'validate':
            index = proc_args_tmp.index('classes_inv')
            proc_args_tmp.insert(index, 'len(classes_inv)')
    
    argstr_process = ', '.join(proc_args_tmp) \
        .replace('-', '_') \
        .replace(', def,', ', drm,')
    
    # create the process argument list string
    cmd_str = "cmd = ['{command}', {args_cmd}]".format(command=command, args_cmd=argstr_process)
    
    # special handling of optional flag args
    # the args are removed from the cmd list and flags (plus values) added if not None or True
    # e.g. '-u' if update=True or '-m /path' if mli_dir='/path'
    if command_base in flag_args:
        args = []
        for arg in flag_args[command_base]:
            cmd_str = cmd_str.replace(', {}'.format(arg[0]), '')
            args.append(arg[0])
            cmd_str += "\nif {a} is not {d}:\n{i}cmd.append('{k}')" \
                .format(i=indent, d=arg[2], k=arg[1], a=arg[0])
            if arg[2] is None:
                cmd_str += '\n{i}cmd.append({a})'.format(i=indent, a=arg[0])
    
    # create the process call string
    proc_str = "process(cmd, logpath=logpath, outdir=outdir{inlist}, shellscript=shellscript)" \
        .format(inlist=', inlist=inlist' if command_base in inlist else '')
    
    fun_proc = '{0}\n{1}'.format(cmd_str, proc_str)
    
    # for commands taking several parallel list parameters, the lists are interleaved via zip
    if command_base == 'lin_comb_cpx':
        fun_proc = fun_proc.replace('factors_r, factors_i', 'zip(factors_r, factors_i)')
    elif command_base == 'mosaic':
        fun_proc = fun_proc.replace('data_in_list, DEM_par_list', 'zip(data_in_list, DEM_par_list)')
    elif command_base == 'single_class_mapping':
        fun_proc = fun_proc.replace('files, thres_lower, thres_upper', 'zip(files, thres_lower, thres_upper)')
    ######################################################################################
    # create the function docstring
    
    # find the start of the docstring and filter the result
    doc_start = 'input parameters:[ ]*\n' if re.search('input parameters', out) else 'usage:.*(?=\n)'
    doc = '\n' + out[re.search(doc_start, out).end():]
    
    # define a pattern containing individual parameter documentations
    pattern = r'\n[ ]*[<\[]*(?P<par>{0})[>\]]*[\t ]+(?P<doc>.*)'.format(
        '|'.join(arg_req_raw + arg_opt_raw).replace('.', r'\.'))
    
    # identify the start indices of all pattern matches
    starts = [m.start(0) for m in re.finditer(pattern, doc)] + [len(out)]
    
    # filter out all individual (parameter, description) docstring tuples
    doc_items = []
    j = 0
    done = []
    for i in range(0, len(starts) - 1):
        doc_raw = doc[starts[i]:starts[i + 1]]
        doc_list = list(re.search(pattern, doc_raw, flags=re.DOTALL).groups())
        if doc_list[0] not in proc_args:
            # documented parameter was aggregated into a list parameter; document the list instead
            if command_base in replacements.keys():
                repl = replacements[command_base][0]
                for k, item in enumerate(repl[1]):
                    if item not in done:
                        doc_items.append([item, repl[2][k]])
                        done.append(item)
                        j += 1
            continue
        if doc_list[0] in done:
            # continuation of the previous parameter's documentation
            doc_items[-1][1] += doc_raw
            continue
        while doc_list[0] != proc_args[j]:
            # fill gaps for parameters without documentation
            doc_list_sub = [proc_args[j], 'not documented']
            doc_items.append(doc_list_sub)
            j += 1
        doc_items.append(doc_list)
        done.append(doc_items[-1][0])
        j += 1
    for k in range(j, len(proc_args)):
        doc_items.append([proc_args[k], 'not documented'])
    
    # add a parameter inlist to the docstring tuples
    if command_base in inlist:
        pos = [x[0] for x in doc_items].index(arg_opt[0])
        doc_items.insert(pos, ('inlist', 'a list of arguments to be passed to stdin'))
    
    # remove the replaced parameters from the argument lists
    doc_items = [x for x in doc_items if x[0] in arg_req + arg_opt]
    
    # replace parameter names which are not possible in Python syntax, i.e. containing '-' or named 'def'
    for i, item in enumerate(doc_items):
        par = item[0].replace('-', '_').replace(', def,', ', drm,')
        description = item[1]
        doc_items[i] = (par, description)
    
    if command_base in ['par_CS_geo', 'par_KS_geo']:
        doc_items.append(('MLI_par', '(output) ISP SLC/MLI parameter file (example: yyyymmdd.mli.par)'))
        doc_items.append(('DEM_par', '(output) DIFF/GEO DEM parameter file (example: yyyymmdd.dem_par)'))
        doc_items.append(('GEO', '(output) Geocoded image data file (example: yyyymmdd.geo)'))
    
    # check if all parameters are documented:
    proc_args = [x.replace('-', '_').replace(', def,', ', drm,') for x in arg_req + arg_opt]
    mismatch = [x for x in proc_args if x not in [y[0] for y in doc_items]]
    if len(mismatch) > 0:
        raise RuntimeError('parameters missing in docstring: {}'.format(', '.join(mismatch)))
    ###########################################
    # format the docstring parameter descriptions
    docstring_elements = ['Parameters\n----------']
    
    # do some extra formatting
    for i, item in enumerate(doc_items):
        par, description = item
        description = re.split(r'\n+\s*', description.strip('\n'))
        # escape * characters (which are treated as special characters for bullet lists by sphinx)
        description = [x.replace('*', r'\\*') for x in description]
        # convert all lines starting with an integer number or 'NOTE' to bullet list items
        latest = None
        for k in range(len(description)):
            line = description[k]
            if re.search('^(?:(?:-|)[-0-9]+|NOTE):', line):
                latest = k
                # prepend '* ' and replace missing spaces after a colon: 'x:x' -> 'x: x'
                description[k] = '* ' + re.sub(r'((?:-|)[-0-9]+:)(\w+)', r'\1 \2', line)
        # format documentation lines coming after the last bullet list item
        # sphinx expects lines after the last bullet item to be indented by two spaces if
        # they belong to the bullet item or otherwise a blank line to mark the end of the bullet list
        if latest is not None:
            # case if there are still lines coming after the last bullet item,
            # prepend an extra two spaces to these lines so that they are properly
            # aligned with the text of the bullet item
            if latest + 2 <= len(description):
                offset = 1
                while latest + offset + 1 <= len(description):
                    description[latest + offset] = '  ' + description[latest + offset]
                    offset += 1
            # if not, then insert an extra blank line
            else:
                description[-1] = description[-1] + '\n'
        # parse the final documentation string for the current parameter
        description = '\n{0}{0}'.join(description).format(indent)
        doc = '{0}:\n{1}{2}'.format(par, indent, description)
        docstring_elements.append(doc)
    ###########################################
    # add docstrings of general parameters and combine the result
    
    # create docstring for parameter logpath
    doc = 'logpath: str or None\n{0}a directory to write command logfiles to'.format(indent)
    docstring_elements.append(doc)
    
    # create docstring for parameter outdir
    doc = 'outdir: str or None\n{0}the directory to execute the command in'.format(indent)
    docstring_elements.append(doc)
    
    # create docstring for parameter shellscript
    doc = 'shellscript: str or None\n{0}a file to write the Gamma commands to in shell format'.format(indent)
    docstring_elements.append(doc)
    
    # combine the complete docstring
    fun_doc = '\n{header}\n\n{doc}\n' \
        .format(header=header,
                doc='\n'.join(docstring_elements))
    ######################################################################################
    # combine the elements to a complete Python function string
    fun = '''{defn}\n"""{doc}"""\n{proc}'''.format(defn=fun_def, doc=fun_doc, proc=fun_proc)
    # indent all lines and add an extra empty line at the end
    fun = fun.replace('\n', '\n{}'.format(indent)) + '\n'
    return fun
def parse_module(bindir, outfile):
    """
    parse all Gamma commands of a module to functions and save them to a Python script.

    All executables found in `bindir` (except those in a hard-coded exclude list) are
    passed to :func:`parse_command`; the resulting function definitions are appended
    to `outfile`. Commands that cannot be parsed are collected and logged.

    Parameters
    ----------
    bindir: str
        the `bin` directory of a module containing the commands
    outfile: str
        the name of the Python file to write

    Returns
    -------

    Raises
    ------
    OSError
        if `bindir` does not exist

    Examples
    --------
    >>> import os
    >>> from pyroSAR.gamma.parser import parse_module
    >>> outname = os.path.join(os.environ['HOME'], 'isp.py')
    >>> parse_module('/cluster/GAMMA_SOFTWARE-20161207/ISP/bin', outname)
    """
    if not os.path.isdir(bindir):
        raise OSError('directory does not exist: {}'.format(bindir))
    excludes = ['coord_trans',  # doesn't take any parameters and is interactive
                'RSAT2_SLC_preproc',  # takes option flags
                'mk_ASF_CEOS_list',  # "cannot create: Directory nonexistent"
                '2PASS_UNW',  # parameter name inconsistencies
                'mk_diff_2d',  # takes option flags
                'gamma_doc'  # opens the Gamma documentation
                ]
    failed = []
    outstring = ''
    for cmd in sorted(finder(bindir, [r'^\w+$'], regex=True), key=lambda s: s.lower()):
        basename = os.path.basename(cmd)
        if basename not in excludes:
            try:
                fun = parse_command(cmd)
            except RuntimeError as e:
                failed.append('{0}: {1}'.format(basename, str(e)))
                continue
            except DeprecationWarning:
                # the command has been renamed/replaced; skip it silently
                continue
            except Exception:
                # narrowed from a bare 'except' so that KeyboardInterrupt/SystemExit
                # are not swallowed; any other parsing failure is recorded
                failed.append('{0}: {1}'.format(basename, 'error yet to be assessed'))
                continue
            outstring += fun + '\n\n'
    if len(outstring) > 0:
        # create the file with the shared import header on first write, then append
        if not os.path.isfile(outfile):
            with open(outfile, 'w') as out:
                out.write('from pyroSAR.gamma.auxil import process\n\n\n')
        with open(outfile, 'a') as out:
            out.write(outstring)
    if len(failed) > 0:
        info = 'the following functions could not be parsed:\n{0}\n({1} total)'
        log.info(info.format('\n'.join(failed), len(failed)))
def autoparse():
    """
    automatic parsing of GAMMA commands.
    This function will detect the GAMMA installation via environment variable `GAMMA_HOME`, detect all available
    modules (e.g. ISP, DIFF) and parse all the module's commands via function :func:`parse_module`.
    A new Python module will be created called `gammaparse`, which is stored under `$HOME/.pyrosar`.
    Upon importing the `pyroSAR.gamma` submodule, this function is run automatically and module `gammaparse`
    is imported as `api`.

    Returns
    -------

    Examples
    --------
    >>> from pyroSAR.gamma.api import diff
    >>> print('create_dem_par' in dir(diff))
    True
    """
    gamma_home = ExamineGamma().home
    # all generated scripts are collected under $HOME/.pyrosar/gammaparse
    target = os.path.join(os.path.expanduser('~'), '.pyrosar', 'gammaparse')
    if not os.path.isdir(target):
        os.makedirs(target)
    # each upper-case folder of the GAMMA installation is treated as a module (e.g. ISP, DIFF)
    for mod_dir in finder(gamma_home, ['[A-Z]*'], foldermode=2):
        mod_name = os.path.basename(mod_dir)
        script = os.path.join(target, mod_name.lower() + '.py')
        if os.path.isfile(script):
            continue
        log.info('parsing module {} to {}'.format(mod_name, script))
        # commands may live in either a 'bin' or a 'scripts' subdirectory
        for sub in ['bin', 'scripts']:
            log.info(sub)
            try:
                parse_module(os.path.join(mod_dir, sub), script)
            except OSError:
                log.info('..does not exist')
    # expose every generated module via the package's __init__.py
    parsed = [re.sub(r'\.py', '', os.path.basename(x))
              for x in finder(target, [r'[a-z]+\.py$'], regex=True)]
    if len(parsed) > 0:
        with open(os.path.join(target, '__init__.py'), 'w') as init:
            init.write('from . import {}'.format(', '.join(parsed)))
================================================
FILE: pyroSAR/gamma/parser_demo.py
================================================
from pyroSAR.gamma.auxil import process
def adapt_filt(int, sm, width, low_SNR_thr='-', filt_width='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Adaptive bandpass filtering of interferograms
    | Copyright 2023, Gamma Remote Sensing, v3.6 clw 18-Apr-2023

    Parameters
    ----------
    int:
        (input) complex interferogram image filename
    sm:
        (output) smoothed interferogram filename
    width:
        number of samples/row
    low_SNR_thr:
        low SNR threshold (enter - for default: .25);
    filt_width:
        filter width in pixels (enter - for default: 1.0)
    xmin:
        offset to starting range pixel(enter - for default: 0)
    xmax:
        offset last range pixel (enter - for default: width-1)
    ymin:
        offset to starting azimuth row (enter - for default: 0)
    ymax:
        offset to last azimuth row (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE(review): auto-generated wrapper (see pyroSAR.gamma.parser); the parameter
    # name 'int' shadows the Python builtin but is kept for interface stability
    # assemble the command line; '-' values make GAMMA fall back to its own defaults
    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/adapt_filt', int, sm, width, low_SNR_thr, filt_width, xmin, xmax, ymin, ymax]
    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)
def adf(interf, sm, cc, width, alpha='-', nfft='-', cc_win='-', step='-', loff='-', nlines='-', wfrac='-', logpath=None, outdir=None, shellscript=None):
    """
    | Adaptive interferogram bandpass filter based on the power spectral density
    | Copyright 2024, Gamma Remote Sensing, v3.9 12-Mar-2024 clw/cm

    Parameters
    ----------
    interf:
        (input) interferogram (fcomplex)
    sm:
        (output) filtered interferogram (fcomplex)
    cc:
        (output) filtered interferogram correlation coefficient (float)
    width:
        number of samples/line
    alpha:
        exponent for non-linear filtering (enter - for default: 0.40)
    nfft:
        filtering FFT window size, 2\\*\\*N, 8 --> 512, (enter - for default: 32)
    cc_win:
        correlation parameter estimation window size odd, max: 15 (enter - for default: 5)
    step:
        processing step (enter - for default: nfft/8)
    loff:
        offset to starting line to process (enter - for default: 0)
    nlines:
        number of lines to process (enter - for default: to end of file)
    wfrac:
        minimum fraction of points required to be non-zero in the filter window (enter - for default: 0.500)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE(review): auto-generated wrapper (see pyroSAR.gamma.parser)
    # assemble the command line; '-' values make GAMMA fall back to its own defaults
    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/adf', interf, sm, cc, width, alpha, nfft, cc_win, step, loff, nlines, wfrac]
    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)
def adf2(interf, cc_interf, sm, cc_filt, width, alpha_max='-', nfft='-', cc_win='-', step='-', loff='-', nlines='-', wfrac='-', logpath=None, outdir=None, shellscript=None):
    """
    | Adaptive interferogram filter based on the power spectral density and correlation coefficient
    | Copyright 2023, Gamma Remote Sensing, v1.2 18-Apr-2023 clw/cm

    Parameters
    ----------
    interf:
        (input) complex interferogram (fcomplex)
    cc_interf:
        (input) correlation coefficient of the input interferogram (float)
    sm:
        (output) filtered interferogram (fcomplex)
    cc_filt:
        (output) filtered interferogram correlation coefficient (float)
    width:
        number of samples/line
    alpha_max:
        maximum value for the adaptive filter exponent (enter - for default: 0.50)
    nfft:
        filter window FFT size, 2\\*\\*N, 8->512, (enter - for default: 32)
    cc_win:
        filtered interferogram correlation estimation window size odd, max: 21 (enter - for default: 9)
    step:
        processing step in range and azimuth (enter - for default: nfft/8)
    loff:
        offset to starting line to process (enter - for default: 0)
    nlines:
        number of lines to process (enter - for default: to end of file)
    wfrac:
        minimum fraction of points required to be non-zero in the filter window (enter - for default: 0.200)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE(review): auto-generated wrapper (see pyroSAR.gamma.parser)
    # assemble the command line; '-' values make GAMMA fall back to its own defaults
    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/adf2', interf, cc_interf, sm, cc_filt, width, alpha_max, nfft, cc_win, step, loff, nlines, wfrac]
    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)
def af_SLC(SLC_par, SLC, rwin='-', azwin='-', dr='-', daz='-', thres='-', a1_flg='-', b0_flg='-', offsets='-', n_ovr='-', roff='-', azoff='-', logpath=None, outdir=None, shellscript=None):
    """
    | Focus testing for SLC data using autofocus estimation of effective velocity
    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw/uw

    Parameters
    ----------
    SLC_par:
        (input) ISP SLC image parameter file
    SLC:
        (input) single-look complex image
    rwin:
        range window size (enter - for default: 1024)
    azwin:
        azimuth window size (enter - for default: 4096)
    dr:
        range sample increment (enter - for default: 1024, enter 0 for single patch)
    daz:
        azimuth line increment (enter - for default: 8192, enter 0 for single patch)
    thres:
        offset estimation SNR threshold (enter - for default: 10.000)
    a1_flg:
        fit a1 for first derivative of the effective velocity w.r.t.range (enter - for default)
            * 0: no (default)
            * 1: yes
    b0_flg:
        fit b0 for first derivative of the effective velocity w.r.t. along-track time (enter - for default)
            * 0: no (default)
            * 1: yes
    offsets:
        (output) range and azimuth offsets and SNR data in text format (enter - for no output)
    n_ovr:
        SLC oversampling factor (1,2,4: enter - for default: 1)
    roff:
        range offset for single patch center (enter - for default: image center in range)
    azoff:
        azimuth offset for single patch center (enter - for default: image center in azimuth)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/af_SLC',
             SLC_par, SLC, rwin, azwin, dr, daz, thres,
             a1_flg, b0_flg, offsets, n_ovr, roff, azoff],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ASAR_LO_phase_drift(SLC1_par, SLC2_par, OFF_par, ph_drift, logpath=None, outdir=None, shellscript=None):
    """
    | Calculate interferometric phase correction due to drift of the ASAR local oscillator
    | Copyright 2023, Gamma Remote Sensing, v1.2 18-Apr-2023 clw

    Parameters
    ----------
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    ph_drift:
        (output) interferometric phase correction due to drift of the ASAR LO (radians)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble and dispatch the Gamma command line
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/ASAR_LO_phase_drift',
             SLC1_par, SLC2_par, OFF_par, ph_drift],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ASAR_XCA(ASA_XCA, antenna, swath='-', pol='-', logpath=None, outdir=None, shellscript=None):
    """
    | Interpretation of ASAR external calibration data file (ASA_XCA)
    | Copyright 2006, Gamma Remote Sensing, v1.1 7-June-2006 awi/uw/clw

    Parameters
    ----------
    ASA_XCA:
        (input) ASAR external calibration data file (binary)
    antenna:
        (output) 1-way antenna gain pattern file or '-' (if not provided)
        or 'all' to generate all ASAR antenna diagrams
    swath:
        ASAR swath (IS1,IS2,...IS7;SS1,SS2,...SS5)
    pol:
        polarization (HH,VV,HV,VH)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/ASAR_XCA',
             ASA_XCA, antenna, swath, pol],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ave_cpx(cpx_list, width, cpx_ave, start='-', nlines='-', zflag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate average of a set of FCOMPLEX images
    | Copyright 2022, Gamma Remote Sensing, v2.1 17-Aug-2022 clw/cm

    Parameters
    ----------
    cpx_list:
        (input) list of coregistered images (FCOMPLEX)
    width:
        number of samples/line
    cpx_ave:
        (output) average of images listed in cpx_list (FCOMPLEX)
    start:
        starting line (enter - for default: 1)
    nlines:
        number of lines to process (enter - for default: entire file)
    zflag:
        zero flag (enter - for default)
            * 0: interpret 0.0 as missing data value (default)
            * 1: interpret 0.0 as valid data
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/ave_cpx',
             cpx_list, width, cpx_ave, start, nlines, zflag],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ave_image(im_list, width, ave_image, start='-', nlines='-', pixav_x='-', pixav_y='-', zflag='-', nmin='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate average of a set of FLOAT images
    | Copyright 2022, Gamma Remote Sensing, v2.6 17-Aug-2022 clw/cm

    Parameters
    ----------
    im_list:
        (input) list of coregistered images (FLOAT)
    width:
        number of samples/line
    ave_image:
        (output) average of images listed in im_list (FLOAT)
    start:
        starting line (enter - for default: 1)
    nlines:
        number of lines to process (enter - for default: entire file)
    pixav_x:
        number of pixels to average in width (enter - for default: 1)
    pixav_y:
        number of pixels to average in height (enter - for default: 1)
    zflag:
        zero flag (enter - for default)
            * 0: interpret 0.0 as missing data value (default)
            * 1: interpret 0.0 as valid data
    nmin:
        minimum number of images required to calculate the average if zflag = 0 (enter - for default: 3/4\\*nfiles)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/ave_image',
             im_list, width, ave_image, start, nlines,
             pixav_x, pixav_y, zflag, nmin],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def az_integrate(data, width, azi, cflg, scale='-', lz='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate azimuth integral of float data (unwrapped phase or azimuth offsets)
    | Copyright 2012, Gamma Remote Sensing, v1.2 6-Feb-2012

    Parameters
    ----------
    data:
        (input) input data (example: SBI dtrapped phase) (float)
    width:
        (input) number of range samples/line
    azi:
        (output) input data integrated along azimuth (float)
    cflg:
        integration constant flag:
            * 0: set azimuth integral value to 0.0 at specified line
            * 1: set average of the azimuth integral to 0.0
    scale:
        scale factor to apply to the data (enter - for default, default: 1.0)
    lz:
        line offset where the azimuth integral is set to 0.0 (cflg = 0, enter - for default, default: 0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/az_integrate',
             data, width, azi, cflg, scale, lz],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def az_spec_SLC(SLC, SLC_par, spectrum, roff='-', namb='-', pltflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | Doppler centroid estimate from SLC images
    | Copyright 2023, Gamma Remote Sensing, v3.0 19-Apr-2023 clw

    Parameters
    ----------
    SLC:
        (input) SAR image data file (FCOMPLEX or SCOMPLEX format)
    SLC_par:
        (input) ISP SLC image parameter file
    spectrum:
        (output) Doppler spectrum (text format)
    roff:
        range sample offset to center of estimation window (enter - for default: center of swath)
    namb:
        number of multiples of the PRF to add to the estimated centroid (enter - for default: 0)
    pltflg:
        azimuth spectrum plotting flag (enter - for default)
            * 0: none (default)
            * 1: output plot in PNG format
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/az_spec_SLC',
             SLC, SLC_par, spectrum, roff, namb, pltflg],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def base_copy(SLC1_par, baseline1, SLC2_par, baseline2, time_rev='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate baseline file for a subsection of a reference SLC
    | Copyright 2023, Gamma Remote Sensing, v1.2 24-Apr-2023 ts/clw/uw

    Parameters
    ----------
    SLC1_par:
        (input) ISP image parameter file of the reference SLC
    baseline1:
        (input) baseline file derived using the reference SLC geometry
    SLC2_par:
        (input) ISP image parameter file corresponding to the subsection of the reference SLC
    baseline2:
        (output) baseline file derived using the geometry and timing of the SLC subsection
    time_rev:
        SLC image time reversal flag (enter - for default)
            * 1: normal (default)
            * -1: time-reversed
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/base_copy',
             SLC1_par, baseline1, SLC2_par, baseline2, time_rev],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def base_est_fft(interf, SLC1_par, OFF_par, baseline, nazfft='-', r_samp='-', az_line='-', nrfft='-', logpath=None, outdir=None, shellscript=None):
    """
    | Estimate baseline from interferogram phase spectrum
    | Copyright 2023, Gamma Remote Sensing, v2.3 clw/uw 18-Apr-2023

    Parameters
    ----------
    interf:
        (input) multilook interferogram with residual range and azimuth fringes
    SLC1_par:
        (input) SLC1 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    baseline:
        (output) baseline file
    nazfft:
        size of azimuth FFT (2\\*\\*N) (enter - for default: 512)
    r_samp:
        range pixel offset to center of the FFT window (enter - for default: center)
    az_line:
        line offset from start of the interf. for the FFT window (enter - for default: center)
    nrfft:
        size of the range FFT (2\\*\\*N), minimum: 32 (enter - for default: 512)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/base_est_fft',
             interf, SLC1_par, OFF_par, baseline,
             nazfft, r_samp, az_line, nrfft],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def base_init(SLC1_par, SLC2_par, OFF_par, interf, baseline, mflag='-', nrfft='-', nazfft='-', r_samp='-', az_line='-', logpath=None, outdir=None, shellscript=None):
    """
    | Estimate initial baseline using orbit state vectors, offsets, and interferogram phase
    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/uw/cm

    Parameters
    ----------
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file (enter - for none)
    interf:
        (input) unflattened interferogram (enter - for none)
    baseline:
        (output) baseline parameter file
    mflag:
        baseline estimation method flag (enter - for default)
        mflag b_para b_perp input
            * 0: orbits orbits p1,p2 (default)
            * 1: offsets offsets p1,p2,off
            * 2: orbits fft p1,p2,off,int
            * 3: offsets fft p1,p2,off,int
            * 4: fft fft p1,off,int
    nrfft:
        size of range FFT (512, 1024,...) (enter - for default determined from image width)
    nazfft:
        size of azimuth FFT (512, 1024,...) (enter - for default determined from image azimuth lines)
    r_samp:
        range pixel offset to center of the FFT window (enter - for default, default: range center)
    az_line:
        line offset from start of the interf. for the FFT window (enter - for default, default: azimuth center)
        * NOTE: Not all input data files are required for the different methods
          enter - for files that are not provided
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/base_init',
             SLC1_par, SLC2_par, OFF_par, interf, baseline,
             mflag, nrfft, nazfft, r_samp, az_line],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def base_ls(SLC_par, OFF_par, gcp_ph, baseline, ph_flag='-', bc_flag='-', bn_flag='-', bcdot_flag='-', bndot_flag='-', bperp_min='-', SLC2R_par='-', logpath=None, outdir=None, shellscript=None):
    """
    | Least squares baseline estimation using terrain heights
    | Copyright 2023, Gamma Remote Sensing, v2.4 18-Apr-2023 clw/uw/cm

    Parameters
    ----------
    SLC_par:
        (input) ISP parameter file of the reference SLC
    OFF_par:
        (input) ISP interferogram/offset parameter file
    gcp_ph:
        (input) ground control point heights + extracted unwrapped phase (text format)
    baseline:
        (input) baseline parameter file
    ph_flag:
        restore range phase ramp (enter - for default)
            * 0: do not restore (default)
            * 1: restore
    bc_flag:
        cross-track baseline component estimate (enter - for default)
            * 0: orbit-derived
            * 1: estimate from data (default)
    bn_flag:
        normal baseline component estimate (enter - for default)
            * 0: orbit-derived
            * 1: estimate from data (default)
    bcdot_flag:
        cross-track baseline rate estimate (enter - for default)
            * 0: orbit-derived
            * 1: estimate from data (default)
    bndot_flag:
        normal baseline rate estimate (enter - for default)
            * 0: orbit-derived (default)
            * 1: estimate from data
    bperp_min:
        minimum perpendicular baseline required for L.S estimation (m, enter - for default: 10.0)
    SLC2R_par:
        (input) parameter file of resampled SLC, required if SLC2 frequency differs from SLC1 (enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/base_ls',
             SLC_par, OFF_par, gcp_ph, baseline, ph_flag, bc_flag,
             bn_flag, bcdot_flag, bndot_flag, bperp_min, SLC2R_par],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def base_orbit(SLC1_par, SLC2_par, baseline, logpath=None, outdir=None, shellscript=None):
    """
    | Estimate baseline from orbit state vectors
    | Copyright 2023, Gamma Remote Sensing, v4.5 clw/cm 18-Apr-2023

    Parameters
    ----------
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    baseline:
        (output) baseline file (text format, enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble and dispatch the Gamma command line
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/base_orbit',
             SLC1_par, SLC2_par, baseline],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def base_perp(baseline, SLC1_par, OFF_par, time_rev='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate baseline components perpendicular and parallel to look vector
    | Copyright 2023, Gamma Remote Sensing, v3.6 18-Apr-2023 clw/uw

    Parameters
    ----------
    baseline:
        (input) baseline file (text)
    SLC1_par:
        (input) ISP parameter file of SLC1 (reference SLC)
    OFF_par:
        (input) ISP interferogram/offset parameter file
    time_rev:
        SLC image time reversal flag (enter - for default)
            * 1: normal (default)
            * -1: time-reversed
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/base_perp',
             baseline, SLC1_par, OFF_par, time_rev],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def bpf(data_in, data_out, width, fc_x, bw_x, fc_y, bw_y, roff='-', azoff='-', nr='-', naz='-', dtype='-', zflag='-', beta='-', fir_len='-', logpath=None, outdir=None, shellscript=None):
    """
    | Interferometric SAR Processor (ISP): Program GAMMA_SOFTWARE-20250625/ISP/bin/bpf.c
    | Copyright 2023, Gamma Remote Sensing, v1.9 18-Apr-2023 clw
    | Bandpass filter for 2-dimensional image data (FCOMPLEX, SCOMPLEX, and FLOAT)

    Parameters
    ----------
    data_in:
        (input) input image data file
    data_out:
        (output) bandpass filtered image data
    width:
        number of samples/line
    fc_x:
        normalized x-coord. (across) filter center frequency (range: -0.5 --> 0.5)
    bw_x:
        normalized x-coord. bandwidth (range: 0 --> 1.0)
    fc_y:
        normalized y-coord. (down) filter center frequency (range: -0.5 --> 0.5)
    bw_y:
        normalized y-coord. bandwidth (range: 0 --> 1.0)
    roff:
        offset to starting range to filter (enter - for default: 0)
    azoff:
        offset to starting azimuth to filter (enter - for default: 0)
    nr:
        number of range pixels to filter (enter - for default: width - roff)
    naz:
        number of azimuth lines to filter (enter - for default: nlines - azoff)
    dtype:
        data type (enter - for default)
            * 0: FCOMPLEX (default)
            * 1: SCOMPLEX
            * 2: FLOAT
    zflag:
        zero data flag (enter - for default)
            * 0: set output to 0.0 when the input data are 0.0 (no_data)(default)
            * 1: 0.0 values are considered as valid data
    beta:
        Kaiser window beta parameter (enter - for default: 4.538)
    fir_len:
        finite impulse response filter length (enter - for default: 64)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/bpf',
             data_in, data_out, width, fc_x, bw_x, fc_y, bw_y,
             roff, azoff, nr, naz, dtype, zflag, beta, fir_len],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def bridge_unw(int, flag, unw, bridge, width, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Phase unwrap new regions with bridges to regions already unwrapped
    | Copyright 2023, Gamma Remote Sensing, v1.5 19-Apr-2023 clw

    Parameters
    ----------
    int:
        (input) interferogram (FCOMPLEX)
    flag:
        (input) unwrapping flag file
    unw:
        (input/output) unwrapped phase (FLOAT)
    bridge:
        (input) bridge data file (text format)
    width:
        number of samples/row
    xmin:
        starting range pixel offset to unwrap (enter - for default: 0)
    xmax:
        last range pixel offset to unwrap (enter - for default: width-1)
    ymin:
        starting azimuth row offset to unwrap, relative to start (enter - for default: 0)
    ymax:
        last azimuth row offset to unwrap, relative to start (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE: the parameter named 'int' shadows the builtin; it is kept because
    # it is part of the public keyword interface of this generated wrapper.
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/bridge_unw',
             int, flag, unw, bridge, width, xmin, xmax, ymin, ymax],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def cc_wave(interf, MLI1, MLI2, cc, width, bx='-', by='-', wflg='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Estimate interferometric correlation coefficient
    | Copyright 2023, Gamma Remote Sensing, v6.4 6-Dec-2023 clw/uw/cm

    Parameters
    ----------
    interf:
        (input) normalized complex interferogram (FCOMPLEX)
    MLI1:
        (input) multilook intensity image of the first scene (FLOAT) (enter - for none)
    MLI2:
        (input) multilook intensity image of the second scene (FLOAT) (enter - for none)
    cc:
        (output) estimated correlation coefficient (FLOAT)
    width:
        number of samples/line
    bx:
        estimation window size in columns (enter - for default: 5.0)
    by:
        estimation window size in lines (enter - for default: 5.0)
    wflg:
        estimation window (enter - for default):
            * 0: rectangular (constant weighting) (default)
            * 1: circular triangular
            * 2: circular Gaussian
            * 3: normalized vector sum with rectangular window (constant weighting)
            * NOTE: This estimator does not use the MLI data
    xmin:
        starting range pixel offset (enter - for default: 0)
    xmax:
        last range pixel offset (enter - for default: width - 1)
    ymin:
        starting azimuth row offset, relative to start (enter - for default: 0)
    ymax:
        last azimuth row offset, relative to start (enter - for default: nlines - 1)
        * NOTE: The normalized vector sum (wflg = 3) is used as estimator when the MLI images are not provided.
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/cc_wave',
             interf, MLI1, MLI2, cc, width, bx, by, wflg,
             xmin, xmax, ymin, ymax],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def clear_flag(flag, width, flag_bits, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Clear phase unwrapping flag bits
    | Copyright 2023, Gamma Remote Sensing, v1.7 19-Apr-2023 clw

    Parameters
    ----------
    flag:
        (input) phase unwrapping flag filename
    width:
        number of samples/row
    flag_bits:
        byte with value of flag(s) to be cleared:
        Charges = 3 Guides = 4 Low SNR = 8 Visited = 16
        BRANCH PT. = 32 Cuts = 64 Lawn = 128
    xmin:
        starting range pixel offset (enter - for default: 0)
    xmax:
        last range pixel offset (enter - for default: width-1)
    ymin:
        starting azimuth row offset, relative to start (enter - for default: 0)
    ymax:
        last azimuth row offset, relative to start (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/clear_flag',
             flag, width, flag_bits, xmin, xmax, ymin, ymax],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def corr_flag(corr, flag, width, corr_thr, xmin='-', xmax='-', ymin='-', ymax='-', border='-', logpath=None, outdir=None, shellscript=None):
    """
    | Low correlation region detection for phase unwrapping
    | Copyright 2023, Gamma Remote Sensing, v2.6 19-Apr-2023 clw/uw

    Parameters
    ----------
    corr:
        (input) interferometric correlation file
    flag:
        (input/output) phase unwrapping flag filename
    width:
        number of samples/row
    corr_thr:
        correlation threshold (0 --> 1.0)
    xmin:
        starting range pixel offset (enter - for default: 0)
    xmax:
        last range pixel offset (enter - for default: width-1)
    ymin:
        starting azimuth row offset, relative to start (enter - for default: 0)
    ymax:
        last azimuth row offset, relative to start (enter - for default: nlines-1)
    border:
        effective range of low coherence pixels to set low coherence flag (enter - for default: 2)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/corr_flag',
             corr, flag, width, corr_thr, xmin, xmax, ymin, ymax, border],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def create_offset(SLC1_par, SLC2_par, OFF_par, algorithm='-', rlks='-', azlks='-', iflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | Create and update ISP offset and interferogram parameter files
    | Copyright 2023 Gamma Remote Sensing v5.6 18-Apr-2023 clw/uw/cm

    Parameters
    ----------
    SLC1_par:
        (input) SLC1/MLI1 ISP image parameter filename (reference)
    SLC2_par:
        (input) SLC2/MLI2 ISP image parameter filename
    OFF_par:
        (input/output) ISP offset/interferogram parameter file
    algorithm:
        offset estimation algorithm
            * 1: intensity cross-correlation (default)
            * 2: fringe visibility
    rlks:
        number of interferogram range looks (enter - for default: 1)
    azlks:
        number of interferogram azimuth looks (enter - for default: 1)
    iflg:
        interactive mode flag (enter - for default)
            * 0: non-interactive
            * 1: interactive (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/create_offset',
             SLC1_par, SLC2_par, OFF_par, algorithm, rlks, azlks, iflg],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def dcomp_sirc(infile, outfile, samples, loff='-', nlines='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract SIR-C SLC compressed single-pol data
    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 clw

    Parameters
    ----------
    infile:
        (input) SIR-C single-pol SLC compressed data
    outfile:
        (output) complex floating point data
    samples:
        number of polarimetric samples per input line (4 bytes/sample)
    loff:
        offset to starting line (enter - for default: 0)
    nlines:
        number of lines to copy (enter - or 0 for default: entire file)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/dcomp_sirc',
             infile, outfile, samples, loff, nlines],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def dcomp_sirc_quad(infile, outfile, samples, parameter, loff='-', nlines='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract SIR-C MLC or SLC compressed quad-pol data
    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 uw/clw

    Parameters
    ----------
    infile:
        (input) SIR-C SLC or MLC quad-pol compressed data
    outfile:
        (output) complex floating point data
    samples:
        number of polarimetric samples per input line (10 bytes/sample)
    parameter:
        polarimetric parameter to extract from SLC or MLC product:
            * 0: SLC total power
            * 1: SLC-HH
            * 2: SLC-HV
            * 3: SLC-VH
            * 4: SLC-VV
            * 5: MLC total power
            * 6: MLC-HVHV\\*
            * 7: MLC-VVVV\\*
            * 8: MLC-HHHH\\*
            * 9: MLC-HHHV\\*
            * 10: MLC-HHVV\\*
            * 11: MLC-HVVV\\*
    loff:
        offset to starting line (enter - for default: 0)
    nlines:
        number of lines to copy (enter - or 0 for default: entire file)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/dcomp_sirc_quad',
             infile, outfile, samples, parameter, loff, nlines],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def DELFT_vec2(SLC_par, DELFT_dir, nstate='-', interval='-', ODR='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract and interpolate DELFT ERS-1, ERS-2, and ENVISAT state vectors
    | Copyright 2023, Gamma Remote Sensing, v2.7 19-Apr-2023 clw

    Parameters
    ----------
    SLC_par:
        (input) ISP image parameter file
    DELFT_dir:
        directory containing Delft orbit arclist and ODR files for ERS-1, ERS-2 or ENVISAT
        * NOTE: enter . for current directory
    nstate:
        number of state vectors to generate (enter - for default, >= 15)
    interval:
        time interval between state vectors in the ISP image parameter file (s) (enter - for default: 10.0)
    ODR:
        ODR file to use (include path) rather than ODR file determined from the Delft orbit arclist (enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/DELFT_vec2',
             SLC_par, DELFT_dir, nstate, interval, ODR],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def doppler_2d_SLC(SLC, SLC_par, dop2d, loff='-', blsz='-', nbl='-', a2_flg='-', b0_flg='-', b1_flg='-', c0_flg='-', namb='-', logpath=None, outdir=None, shellscript=None):
    """
    | 2-D Doppler centroid trend estimation from SLC data
    | Copyright 2025, Gamma Remote Sensing, v1.3 14-May-2025 clw/cm

    Parameters
    ----------
    SLC:
        (input) SLC image (SCOMPLEX or FCOMPLEX format)
    SLC_par:
        (input) SLC parameter file
    dop2d:
        (output) estimated doppler centroid as a function of range for each block (text format) (enter - for none)
    loff:
        number of lines offset (enter - for default: 0)
    blsz:
        block size lines, minimum: 256 (enter - for default: 2048)
    nbl:
        number of blocks (enter - for default: calculated automatically)
    a2_flg:
        fit a2 for second derivative of the Doppler centroid w.r.t.range (Hz/m/m) (enter - for default)
            * 0: no (default)
            * 1: yes
    b0_flg:
        fit b0 for first derivative of the Doppler centroid w.r.t. along-track time (Hz/sec) (enter - for default)
            * 0: no
            * 1: yes (default)
    b1_flg:
        fit b1 for along-track rate of the change in slope of Doppler w.r.t. range (Hz/sec/m)(enter - for default)
            * 0: no
            * 1: yes (default)
    c0_flg:
        fit c0 for second derivative of the Doppler centroid w.r.t. along-track time (Hz/sec/sec) (enter - for default)
            * 0: no (default)
            * 1: yes
    namb:
        user defined number of Doppler ambiguities to add to the Doppler function (enter - for default: 0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/doppler_2d_SLC',
             SLC, SLC_par, dop2d, loff, blsz, nbl,
             a2_flg, b0_flg, b1_flg, c0_flg, namb],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def DORIS_vec(SLC_par, DOR, nstate='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract ENVISAT DORIS state vectors and write to an ISP image parameter file
    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 clw

    Parameters
    ----------
    SLC_par:
        (input/output) ISP SLC/MLI image parameter file
    DOR:
        (input) ASAR DORIS data file (DOR_VOR_AXVF)
    nstate:
        number of state vectors to extract (enter - for default: 11)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # '-' placeholders tell the Gamma program to use its built-in defaults
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/DORIS_vec',
             SLC_par, DOR, nstate],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def error_stat(d1, d2, width, dtype, roff, loff, nr, nl, report, logpath=None, outdir=None, shellscript=None):
    """
    | Calculate statistics for two data files and their difference (FLOAT or FCOMPLEX)
    | Copyright 2017, Gamma Remote Sensing, v1.2 clw 7-Jan-2016

    Parameters
    ----------
    d1:
        (input) data file 1
    d2:
        (input) data file 2
    width:
        image line width (samples/line)
    dtype:
        data type for d1 and d2:
            * 0: FLOAT
            * 1: FCOMPLEX
    roff:
        sample offset to region start (enter - for default: 0)
    loff:
        line offset to region start (enter - for default: 0)
    nr:
        region width (samples, enter - for default: width - roff)
    nl:
        number of lines in the region (enter - for default: data_lines - loff)
    report:
        output text file (keyword:value format)
        keywords: data_1, data_2, d1_mean, d2_mean, d1_stddev, d2_stddev, root_mean_square_error, normalized_mean_square_error,
        cross_correlation_coefficient, cross_correlation_angle, total_samples, non_zero_samples, valid_fraction
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble and dispatch the Gamma command line
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/error_stat',
             d1, d2, width, dtype, roff, loff, nr, nl, report],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def fill_gaps(data_in, width, data_out, dtype='-', method='-', max_dist='-', bp_flag='-', win='-', ds_method='-', ds_size='-', ds_data='-', logpath=None, outdir=None, shellscript=None):
    """
    | Fill gaps in 2D raster file
    | Copyright 2023, Gamma Remote Sensing, v2.4 18-Apr-2023 cm
    Parameters
    ----------
    data_in:
        (input) input data file (FLOAT / FCOMPLEX)
    width:
        width of input data
    data_out:
        (output) output data file (FLOAT / FCOMPLEX)
    dtype:
        input and output data type (enter - for default)
        * 0: FLOAT (default)
        * 1: FCOMPLEX
    method:
        method flag (enter - for default: 4)
        * 0: Laplace interpolation and linear extrapolation - least squares solution
        * 1: Laplace interpolation and linear extrapolation - smaller system of linear equations than in method #0 in case of few missing values - least squares solution
        * 2: Laplace interpolation and linear extrapolation - solves a direct linear system of equations for the missing values (not a least squares solution)
        * 3: biharmonic interpolation - implementation similar to method #1 - least squares solution
        * 4: spring analogy: assumes springs (with a nominal length of zero) connect each node with every neighbor - least squares solution (default)
        * 5: average of the 8 nearest neighbors - this method solves a direct linear system for the missing values (not a least squares solution)
        * NOTE: small gaps: use method #0, #1 or #3 - large gaps: use method #2, #4 or #5 - most demanding: method #3
    max_dist:
        maximum interpolation / extrapolation distance in pixels (enter - or 0 for default: unlimited)
    bp_flag:
        perform block processing (enter - for default: 0)
        * 0: no block processing (default)
        * 1: block processing (faster, avoid overflow, however might be slightly less accurate)
        * NOTE: when block processing is selected, a two-step process is carried out: 1: solving the downsampled array (coarse processing), 2: block processing
    win:
        block size (pixels, 10 < win < 1000, enter - for default: 100)
    ds_method:
        method flag (0 - 5, same choices as for [method] option) (enter - for default: same as [method])
        * NOTE: for an input containing large gaps, method #2, #4 or #5 may yield more appropriate results.
    ds_size:
        maximum size of downsampled data (for both width and height) (pixels, ds_size > 10, enter - for default: 400)
    ds_data:
        (output) write intermediate data after solving the downsampled array (FLOAT / FCOMPLEX)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in the order expected by the Gamma binary
    args = [data_in, width, data_out, dtype, method, max_dist, bp_flag, win, ds_method, ds_size, ds_data]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/fill_gaps'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def fspf(data_in, data_out, width, dtype='-', r_max='-', spf_type='-', MLI_par='-', interp_mode='-', order='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP fspf: Fast spatial filter for 2D data
    | Copyright 2025, Gamma Remote Sensing, v2.0 9-Apr-2025 of/clw/uw/cm
    Parameters
    ----------
    data_in:
        (input) input image data
    data_out:
        (output) spatially filtered image data
    width:
        number of samples/row
    dtype:
        data type (enter - for default):
        * 0: FCOMPLEX
        * 1: SCOMPLEX
        * 2: FLOAT (default)
    r_max:
        maximum filter radius (range samples) (enter - for default: 64)
    spf_type:
        spatial filter type (enter - for default):
        * 0: uniform average (default for FCOMPLEX and SCOMPLEX)
        * 1: triangular weighted average: 1 - (r/r_max)
        * 2: quadratic weighted average: 1 - (r/r_max)^2
        * 3: Gaussian weighted average: exp(-2.\\*(r^2/r_max^2))
        * 4: linear least-squares (default for FLOAT data)
        * 5: median
    MLI_par:
        MLI or SLC parameter file with the same number of looks as the input image, required for GPRI data (enter - for none)
    interp_mode:
        interpolation method for resampling the data to the original size after filtering
        * 0: bicubic spline (default)
        * 1: bicubic spline sqrt(x)
        * 2: B-spline interpolation (default B-spline degree: 3)
        * 3: B-spline interpolation sqrt(x) (default B-spline degree: 3)
    order:
        B-Spline interpolation degree (2->9) (enter - default: 3)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # delegate execution (and logging/shellscript capture) to the shared helper
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/fspf',
             data_in, data_out, width, dtype, r_max, spf_type, MLI_par, interp_mode, order],
            outdir=outdir, logpath=logpath, shellscript=shellscript)
def gcp_phase(unw, OFF_par, gcp, gcp_ph, win_sz='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract unwrapped phase at GCP locations
    | Copyright 2023, Gamma Remote Sensing, v1.6 19-Apr-2023 clw
    Parameters
    ----------
    unw:
        (input) unwrapped interferometric phase
    OFF_par:
        (input) ISP interferogram/offset parameter file
    gcp:
        (input) ground control point data (text format)
    gcp_ph:
        (output) ground control point data + extracted unwrapped phase (text)
    win_sz:
        window size for averaging phase for each GCP, must be odd (enter - for default: 1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build the full command line for the gcp_phase binary and run it
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/gcp_phase'
    process([executable, unw, OFF_par, gcp, gcp_ph, win_sz],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def grasses(int, flag, unw, width, xmin='-', xmax='-', ymin='-', ymax='-', xinit='-', yinit='-', init_ph='-', logpath=None, outdir=None, shellscript=None):
    """
    | Phase unwrapping by region growing
    | Copyright 2023, Gamma Remote Sensing, v4.4 19-Apr-2023 clw/uw
    Parameters
    ----------
    int:
        (input) interferogram filename
    flag:
        (input) unwrapping flag filename
    unw:
        (output) unwrapped phase filename
    width:
        number of samples/row
    xmin:
        starting range pixel offset (enter - for default: 0)
    xmax:
        last range pixel offset (enter - for default: width-1)
    ymin:
        starting azimuth row offset, relative to start (enter - for default: 0)
    ymax:
        last azimuth row offset, relative to start (enter - for default: nlines-1)
    xinit:
        starting range pixel for unwrapping (enter - for default: width/2)
    yinit:
        starting row to unwrap (enter - for default: height/2)
    init_ph:
        flag to set phase at starting point to 0.0 (enter - for default)
        * 0: not set to 0.0 (default)
        * 1: set to 0.0
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE: the first parameter shadows the builtin `int`; the name is kept so
    # existing keyword-argument callers are not broken (auto-generated interface)
    args = [int, flag, unw, width, xmin, xmax, ymin, ymax, xinit, yinit, init_ph]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/grasses'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def GRD_to_SR(GRD_par, MLI_par, OFF_par, in_file, out_file, rlks='-', azlks='-', interp_mode='-', sr_rsp='-', sr_azsp='-', degree='-', logpath=None, outdir=None, shellscript=None):
    """
    | Conversion to slant range for ISP MLI and INSAR ground range data of type FLOAT
    | Copyright 2023, Gamma Remote Sensing, v2.5 18-Apr-2023 uw/clw/cm
    Parameters
    ----------
    GRD_par:
        (input) SLC parameter file of output ground range image
    MLI_par:
        (input/output) MLI ISP image parameter file for slant range image
        * NOTE: delete an existing MLI parameter file to recalculate the output MLI parameters
    OFF_par:
        (input) ISP offset/interferogram parameter file of input image (enter - image in MLI geometry)
    in_file:
        (input) ground range image (FLOAT)
    out_file:
        (output) slant range image (FLOAT)
    rlks:
        multi-looking in range (prior to resampling, enter - for default: 1)
    azlks:
        multi-looking in azimuth (prior to resampling, enter - for default: 1)
    interp_mode:
        interpolation mode (enter - for default)
        * 0: nearest-neighbor
        * 1: bicubic spline
        * 2: bicubic spline log(x)
        * 3: bicubic spline sqrt(x)
        * 4: B-spline interpolation (default B-spline degree: 3)
        * 5: B-spline interpolation sqrt(x) (default) (default B-spline degree: 3)
        * NOTE: log and sqrt interpolation modes should only be used with non-negative data!
    sr_rsp:
        output image slant range sample spacing (m) (enter - for default: c/(2\\*adc_sampling_rate)
    sr_azsp:
        output image azimuth sample spacing (m) (enter - for default: (input image azimuth spacing) \\* azlks)
    degree:
        B-spline degree (2->9) (enter - for default: 3)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # run the GRD_to_SR binary through the common process dispatcher
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/GRD_to_SR'
    process([executable, GRD_par, MLI_par, OFF_par, in_file, out_file,
             rlks, azlks, interp_mode, sr_rsp, sr_azsp, degree],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def hgt_map(unw, SLC_par, OFF_par, baseline, hgt, gr, ph_flag='-', loff='-', nlines='-', SLC2R_par='-', logpath=None, outdir=None, shellscript=None):
    """
    | Interferometric height/ground range estimation vs. slant range
    | Copyright 2023, Gamma Remote Sensing, v5.3 clw/uw 18-Apr-2023
    Parameters
    ----------
    unw:
        (input) unwrapped interferometric phase
    SLC_par:
        (input) ISP parameter file for the reference SLC
    OFF_par:
        (input) ISP offset/interferogram processing parameters
    baseline:
        (input) baseline parameter file
    hgt:
        (output) height file (in slant range geometry) relative to the WGS-84 ellipsoid
    gr:
        (output) cross-track ground ranges on the WGS-84 ellipsoid (in slant range geometry)
    ph_flag:
        restore phase slope flag (enter - for default)
        * 0: no phase change
        * 1: add back phase ramp (default)
    loff:
        offset to starting line (enter - for default: 0)
    nlines:
        number of lines to calculate (enter - for default: to end of file)
    SLC2R_par:
        (input) parameter file of resampled SLC, required if SLC2 frequency differs from SLC1 (enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # command-line arguments in the order documented above
    args = [unw, SLC_par, OFF_par, baseline, hgt, gr, ph_flag, loff, nlines, SLC2R_par]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/hgt_map'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def image_stat(image, width, roff='-', loff='-', nr='-', nl='-', report='-', median_flg='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate mean, standard deviation, number of non-zero values, min, max and median for a rectangular image region (FLOAT format)
    | Copyright 2025, Gamma Remote Sensing, v1.6 27-May-2025 clw/cm
    Parameters
    ----------
    image:
        (input) image data file (FLOAT)
    width:
        image line width (samples/line)
    roff:
        sample offset to region start (enter - for default: 0)
    loff:
        line offset to region start (enter - for default: 0)
    nr:
        region width (samples, enter - for default: width - roff)
    nl:
        number of lines in the region (enter - for default: image_lines - loff)
    report:
        output text file (keyword:value format, enter - for none)
        keywords: file, mean, stdev, total_samples, non_zero_samples, fraction_valid, min, max, median
    median_flg:
        median calculation flag (enter - for default)
        * 0: do not calculate median
        * 1: calculate median (default, memory use may be large)
        * NOTE: only the non-zero samples are considered in the statistical values
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # hand the assembled command to the shared process wrapper
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/image_stat',
             image, width, roff, loff, nr, nl, report, median_flg],
            outdir=outdir, logpath=logpath, shellscript=shellscript)
def init_offset(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, rlks='-', azlks='-', rpos='-', azpos='-', offr='-', offaz='-', thres='-', rwin='-', azwin='-', cflag='-', deramp='-', logpath=None, outdir=None, shellscript=None):
    """
    | Determine initial offset between SLC images using correlation of image intensity
    | Copyright 2023, Gamma Remote Sensing, v3.3 clw/cm 18-Apr-2023
    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2:
        (input) single-look complex image 2
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    rlks:
        number of range looks (enter - for default: 1)
    azlks:
        number of azimuth looks (enter - for default: 1)
    rpos:
        center of patch in range (samples) (enter - for default: image center)
    azpos:
        center of patch in azimuth (lines) (enter - for default: image center)
    offr:
        initial range offset (samples) (enter - for default: 0)
    offaz:
        initial azimuth offset (lines) (enter - for default: 0)
    thres:
        cross-correlation threshold (enter - for default: 0.150)
    rwin:
        range window size (enter - for default: 512)
    azwin:
        azimuth window size (enter - for default: 512)
    cflag:
        copy offsets to the range and azimuth offset polynomials in the OFF_par (enter - for default)
        * 0: do not copy
        * 1: copy constant range and azimuth offset (default)
    deramp:
        deramp SLC phase flag (enter - for default)
        * 0: no deramp (Doppler centroid close to 0) (default)
        * 1: deramp SLC phase
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional CLI arguments followed by the dispatch through process()
    args = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, rlks, azlks, rpos, azpos,
            offr, offaz, thres, rwin, azwin, cflag, deramp]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/init_offset'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def init_offset_orbit(SLC1_par, SLC2_par, OFF_par, rpos='-', azpos='-', cflag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Initial SLC image offset estimation from orbit state-vectors and image parameters
    | Copyright 2020, Gamma Remote Sensing, v1.9 18-Apr-2023 clw/uw/cm
    Parameters
    ----------
    SLC1_par:
        (input) SLC1 parameter file
    SLC2_par:
        (input) SLC2 parameter file
    OFF_par:
        (input/output) ISP/offset parameter file
    rpos:
        range position for offset estimation (enter - for default: center of SLC1)
    azpos:
        azimuth position for offset estimation (enter - for default: center of SLC1)
    cflag:
        copy offsets to the range and azimuth offset polynomials in the OFF_par (enter - for default)
        * 0: do not copy
        * 1: copy constant range and azimuth offset (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the init_offset_orbit command
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/init_offset_orbit'
    process([executable, SLC1_par, SLC2_par, OFF_par, rpos, azpos, cflag],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def interf_SLC(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, MLI1, MLI2, interf, rlks='-', azlks='-', loff='-', nltot='-', rfilt='-', azfilt='-', s_off='-', logpath=None, outdir=None, shellscript=None):
    """
    | Interferogram generation using a pair of SLC images
    | Copyright 2023, Gamma Remote Sensing, v5.0 clw/uw 18-Apr-2023
    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2:
        (input) single-look complex image 2
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    MLI1:
        (output) multi-look intensity image 1
    MLI2:
        (output) multi-look intensity image 2
    interf:
        interferogram from SLC1 and SLC2
    rlks:
        number of interferogram range looks (enter - for default: 2)
    azlks:
        number of interferogram azimuth looks (enter - for default: 10)
    loff:
        offset to starting line of interferogram (relative to start of SLC1) (enter - for default: 0)
    nltot:
        number of SLC lines to process (enter - or 0 for default: to end of file)
    rfilt:
        range common band filtering flag (enter - for default)
        * 0: OFF
        * 1: ON (default)
    azfilt:
        azimuth common band filtering flag (enter - for default)
        * 0: OFF
        * 1: ON (default)
    s_off:
        offset to the nominal range spectral shift (frac. of range sampling freq.) (enter - for default: 0.0)
        * NOTE: enter - as filename to avoid creation of corresponding output file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # collect the positional arguments and execute via the shared helper
    args = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, MLI1, MLI2, interf,
            rlks, azlks, loff, nltot, rfilt, azfilt, s_off]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/interf_SLC'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def interp_ad(data_in, data_out, width, r_max='-', np_min='-', np_max='-', w_mode='-', dtype='-', cp_data='-', logpath=None, outdir=None, shellscript=None):
    """
    | Weighted interpolation of gaps in 2D data using an adaptive smoothing window
    | Copyright 2018, Gamma Remote Sensing, v2.1 13-Jun-2018 clw/uw
    Parameters
    ----------
    data_in:
        (input) data with gaps
    data_out:
        (output) data with gaps filled by interpolation
    width:
        number of samples/row
    r_max:
        maximum interpolation window radius (default(-): 16)
    np_min:
        minimum number of points used for the interpolation (default(-): 16)
    np_max:
        maximum number of points used for the interpolation (default(-): 16)
    w_mode:
        data weighting mode (enter - for default):
        * 0: constant
        * 1: 1 - (r/r_max)
        * 2: 1 - (r/r_max)\\*\\*2 (default)
        * 3: exp(-2.\\*(r\\*\\*2/r_max\\*\\*2))
    dtype:
        input and output data type:
        * 0: FCOMPLEX
        * 1: SCOMPLEX
        * 2: FLOAT (default)
        * 3: INT
        * 4: SHORT
    cp_data:
        copy data flag:
        * 0: do not copy input data values to output
        * 1: copy input data values to output (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # run the interp_ad binary with the documented argument order
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/interp_ad'
    process([executable, data_in, data_out, width, r_max, np_min, np_max, w_mode, dtype, cp_data],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def mask_data(data_in, width, data_out, mask, dtype='-', logpath=None, outdir=None, shellscript=None):
    """
    | Mask float or fcomplex data using an 8-bit SUN/BMP/TIFF format raster image
    | Copyright 2022, Gamma Remote Sensing, v1.6 8-Nov-2022 clw/cm
    Parameters
    ----------
    data_in:
        (input) data file (FLOAT or FCOMPLEX format)
    width:
        width of input data file
    data_out:
        (output) data file, same data format as input
    mask:
        (input) mask file, SUN/BMP/TIFF raster format, 8-bits/pixel
        output data values are set to 0.0 at all locations where the mask is black (0,0,0) or dn = 0
        * NOTE: mask file must have the same width as the input data file
    dtype:
        data format:
        * 0: FLOAT (default)
        * 1: FCOMPLEX
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # delegate to the shared process wrapper
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/mask_data', data_in, width, data_out, mask, dtype],
            outdir=outdir, logpath=logpath, shellscript=shellscript)
def mcf(interf, wgt, mask, unw, width, tri_mode='-', roff='-', loff='-', nr='-', nlines='-', npat_r='-', npat_az='-', ovrlap='-', r_init='-', az_init='-', init_flag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Phase unwrapping using Minimum Cost Flow (MCF) on a triangular mesh
    | Copyright 2024, Gamma Remote Sensing, v2.9 clw/uw/cm 4-Apr-2024
    Parameters
    ----------
    interf:
        (input) interferogram (\\*.int,\\*.diff,\\*.flt) (FCOMPLEX)
    wgt:
        (input) weight factors (0 -> 1.0, e.g. coherence map) file (FLOAT) (enter - for uniform weights)
    mask:
        (input) validity mask (SUN/BMP/TIFF raster format, value 0 -> pixel not used) (enter - if no mask)
    unw:
        (output) unwrapped phase image (\\*.unw) (FLOAT)
    width:
        number of samples/row
    tri_mode:
        triangulation mode (enter - for default)
        * 0: filled triangular mesh
        * 1: Delaunay triangulation
        * 2: filled triangular mesh, replacing gaps with noise (default)
        * 3: filled triangular mesh, replacing gaps and outside boundary with noise
    roff:
        offset to starting range of section to unwrap (enter - for default: 0)
    loff:
        offset to starting line of section to unwrap (enter - for default: 0)
    nr:
        number of range samples of section to unwrap (enter - for default: width - roff)
    nlines:
        number of lines of section to unwrap (enter - for default: total number of lines - loff)
    npat_r:
        number of patches in range (enter - for default: 1, enter 0 to automatically define number of patches)
    npat_az:
        number of patches in azimuth (enter - for default: 1, enter 0 to automatically define number of patches)
    ovrlap:
        overlap between patches in pixels (overlap >= 7, enter - for default: 1024)
    r_init:
        phase reference point range offset (enter - for default: center of valid data bounding box)
    az_init:
        phase reference point azimuth offset (enter - for default: center of valid data bounding box)
    init_flag:
        flag to set phase at reference point (enter - for default)
        * 0: use initial point phase value (default)
        * 1: set phase to 0.0 at initial point
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments for the mcf binary, dispatched through process()
    args = [interf, wgt, mask, unw, width, tri_mode, roff, loff, nr, nlines,
            npat_r, npat_az, ovrlap, r_init, az_init, init_flag]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/mcf'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def MLI_cat(MLI1, MLI2, MLI1_par, MLI2_par, MLI3, MLI3_par, dtype='-', mflg='-', overlap='-', interp_mode='-', degree='-', extrapol='-', logpath=None, outdir=None, shellscript=None):
    """
    | Concatenate two MLI images using B-spline interpolation
    | Copyright 2023, Gamma Remote Sensing, v2.0 18-Apr-2023 awi/cm/clw
    Parameters
    ----------
    MLI1:
        (input) MLI1 image (single-look)
    MLI2:
        (input) MLI2 image to be appended to MLI1
    MLI1_par:
        (input) MLI1 ISP image parameter file
    MLI2_par:
        (input) MLI2 ISP image parameter file
    MLI3:
        (output) concatenated MLI image
    MLI3_par:
        (output) ISP image parameter file for concatenated image
    dtype:
        input/output data type (enter - for default)
        * 0: FLOAT (default)
        * 1: FCOMPLEX
        * NOTE: FCOMPLEX is for differential interferograms
    mflg:
        mosaicking option flag (enter - for default)
        * 0: in overlapping areas, use MLI2 data to fill MLI1 empty areas (default)
        * 1: in overlapping areas, do not use MLI2 data to fill MLI1 empty areas
    overlap:
        number of pixels at the edge of MLI1 valid areas to replace by MLI2 data (only if mflg=0, enter - for default: 0)
    interp_mode:
        interpolation mode in case of different geometries (enter - for default)
        * 0: B-spline interpolation (default for FCOMPLEX)
        * 1: B-spline interpolation sqrt(x) (default for FLOAT)
        * NOTE: sqrt interpolation mode should only be used with non-negative data!
    degree:
        B-spline degree (2->9) (enter - default: 4)
    extrapol:
        extrapolation flag (enter - for default)
        * 0: do not extrapolate (default)
        * 1: extrapolate last line if needed
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build the MLI_cat command and execute it via the common helper
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/MLI_cat'
    process([executable, MLI1, MLI2, MLI1_par, MLI2_par, MLI3, MLI3_par,
             dtype, mflg, overlap, interp_mode, degree, extrapol],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def MLI_copy(MLI_in, MLI_in_par, MLI_out, MLI_out_par, roff='-', nr='-', loff='-', nl='-', logpath=None, outdir=None, shellscript=None):
    """
    | Copy MLI data file with options for segment extraction
    | Copyright 2019, Gamma Remote Sensing, v4.9 15-Oct-2019 uw/clw/cm
    Parameters
    ----------
    MLI_in:
        (input) multi-look intensity image (float format)
    MLI_in_par:
        (input) ISP image parameter file for input MLI
    MLI_out:
        (output) selected MLI section (float format)
    MLI_out_par:
        (output) ISP image parameter file for output MLI
    roff:
        offset to starting range sample (enter - for default: 0)
    nr:
        number of range samples (enter - for default: to end of line
    loff:
        offset to starting line (enter - for default: 0)
    nl:
        number of lines to copy (enter - for default: to end of file)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble and dispatch the MLI_copy command line
    args = [MLI_in, MLI_in_par, MLI_out, MLI_out_par, roff, nr, loff, nl]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/MLI_copy'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def mosaic_WB(data_tab, dtype, data_out, data_par_out, sc_flg='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP: Program GAMMA_SOFTWARE-20250625/ISP/bin/mosaic_WB.c
    | Copyright 2018, Gamma Remote Sensing, v1.3 26-Apr-2018 clw/cm
    | Mosaic Wide-Beam ScanSAR data processed by the MSP
    Parameters
    ----------
    data_tab:
        (input) 2 column list of data and ISP image parameter files for the beams in the mosaic (text)
    dtype:
        (input) input data type:
        * 0: FLOAT
        * 1: FCOMPLEX
    data_out:
        (output) output image mosaic
    data_par_out:
        (output) ISP image parameter file for output image mosaic
    sc_flg:
        intensity scaling flag:
        * 0: do not scale different beam data values
        * 1: use overlap regions to scale beam intensities (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # run the mosaic_WB binary via the shared dispatcher
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/mosaic_WB',
             data_tab, dtype, data_out, data_par_out, sc_flg],
            outdir=outdir, logpath=logpath, shellscript=shellscript)
def multi_look(SLC, SLC_par, MLI, MLI_par, rlks, azlks, loff='-', nlines='-', scale='-', exp='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate a multi-look intensity (MLI) image from an SLC image
    | Copyright 2022, Gamma Remote Sensing, v4.7 8-Aug-2022 clw/uw/cm
    Parameters
    ----------
    SLC:
        (input) single-look complex image (SCOMPLEX or FCOMPLEX)
    SLC_par:
        (input) SLC ISP image parameter file
    MLI:
        (output) multi-look intensity image (FLOAT)
    MLI_par:
        (output) MLI ISP image parameter file
    rlks:
        number of range looks (INT)
    azlks:
        number of azimuth looks (INT)
    loff:
        offset to starting line (enter - for default: 0)
    nlines:
        number of SLC lines to process (enter - for default: entire file)
    scale:
        scale factor for output MLI (enter - for default: 1.0)
    exp:
        exponent for the output MLI (enter - for default: 1.0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build the multi_look command line and dispatch it
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/multi_look'
    process([executable, SLC, SLC_par, MLI, MLI_par, rlks, azlks, loff, nlines, scale, exp],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def multi_look2(SLC, SLC_par, MLI, MLI_par, r_dec, az_dec, rwin='-', azwin='-', wflg='-', n_ovr='-', lanczos='-', beta='-', scale='-', exp='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate an MLI image from an SLC with optional oversampling and separate multilooking and decimation factors
    | Copyright 2024, Gamma Remote Sensing, v1.9 10-Jun-2024 clw/cm
    Parameters
    ----------
    SLC:
        (input) single-look complex image (SCOMPLEX or FCOMPLEX)
    SLC_par:
        (input) SLC image parameter file
    MLI:
        (output) multi-look intensity image (FLOAT)
    MLI_par:
        (output) MLI image parameter file
    r_dec:
        range decimation factor (int)
    az_dec:
        azimuth decimation factor (int)
    rwin:
        averaging window width (int) (enter - for default: r_dec)
    azwin:
        averaging window height (int) (enter - for default: az_dec)
    wflg:
        window weighting function (enter - for default):
        * 0: rectangular (default)
        * 1: Kaiser
        * 2: circular Gaussian
    n_ovr:
        oversampling factor 1 -> 2 (enter - for default: 1)
    lanczos:
        Lanczos interpolator order 5 -> 9 (enter - for default: 7)
    beta:
        Gaussian or Kaiser window parameter (enter - for default: 2.0)
    scale:
        scale factor for output MLI (enter - for default: 1.0)
    exp:
        exponent for the output MLI (enter - for default: 1.0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments for the multi_look2 binary
    args = [SLC, SLC_par, MLI, MLI_par, r_dec, az_dec, rwin, azwin,
            wflg, n_ovr, lanczos, beta, scale, exp]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/multi_look2'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def multi_look_MLI(MLI_in, MLI_in_par, MLI_out, MLI_out_par, rlks, azlks, loff='-', nlines='-', scale='-', e_flag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Multilooking (averaging and decimation) of MLI images
    | Copyright 2019, Gamma Remote Sensing, v1.9 29-Oct-2019 clw/cm
    Parameters
    ----------
    MLI_in:
        (input) multi-look intensity image (MLI) file (float)
    MLI_in_par:
        (input) MLI parameter file
    MLI_out:
        (output) multi-looked MLI image (float)
    MLI_out_par:
        (output) MLI parameter file for output MLI
    rlks:
        range looks for multi-looking
    azlks:
        azimuth looks for multi-looking
    loff:
        offset to starting line (enter - for default: 0)
    nlines:
        number of input MLI lines to process (enter - for default: entire file)
    scale:
        scale factor for output MLI (enter - for default: 1.0)
    e_flag:
        extent flag (enter - for default)
        * 0: only permit pixels with the full number of looks (default)
        * 1: permit pixels without the full number of looks
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # run the multi_look_MLI binary via the common process helper
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/multi_look_MLI'
    process([executable, MLI_in, MLI_in_par, MLI_out, MLI_out_par,
             rlks, azlks, loff, nlines, scale, e_flag],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def multi_look_ScanSAR(SLC_tab, MLI, MLI_par, rlks, azlks, bflg='-', SLCR_tab='-', scale='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate MLI mosaic from ScanSAR SLC burst data (Sentinel-1, TerraSAR-X, RCM...)
    | Copyright 2023, Gamma Remote Sensing v4.6 30-Nov-2023 awi/clw/uw/cm
    Parameters
    ----------
    SLC_tab:
        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range
        SLC_tab line entries: SLC  SLC_par  TOPS_par
    MLI:
        (output) mosaicked MLI image (non-overlapping burst windows)
    MLI_par:
        (output) MLI image parameter file
    rlks:
        number of range looks
    azlks:
        number of azimuth looks
    bflg:
        burst window calculation flag (enter - for default):
        * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)
        * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks
    SLCR_tab:
        (input) 3 column list of the reference scene, swaths are listed in order from near to far range, (enter - for default: none)
        SLCR_tab line entries: SLC  SLC_par  TOPS_par
        When generating an MLI mosaic from resampled ScanSAR SLC data, the SLC_tab of the reference scene must be provided
    scale:
        scale factor for output MLI (enter - for default: calculate from calibration gain in SLC parameter file)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble and dispatch the multi_look_ScanSAR command line
    args = [SLC_tab, MLI, MLI_par, rlks, azlks, bflg, SLCR_tab, scale]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/multi_look_ScanSAR'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def multi_real(data_in, OFF_par_in, data_out, OFF_par_out, rlks='-', azlks='-', loff='-', nlines='-', roff='-', nsamp='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate multi-look averaged or interpolated 2D image (float data)
    | Copyright 2023, Gamma Remote Sensing, v2.7 19-Apr-2023 clw/uw/cm

    Parameters
    ----------
    data_in:
        (input) input float image file
    OFF_par_in:
        (input) interferogram/offset parameter file for input image
    data_out:
        (output) output multi-look or interpolated float data file
    OFF_par_out:
        (input/output) interferogram/offset parameter file for output, if already existing, used as input
    rlks:
        number of range looks, values < -1, interpreted as an image oversampling factor (enter - for default: 1)
    azlks:
        number azimuth looks, values < -1, interpreted as an image oversampling factor (enter - for default: 1)
    loff:
        line offset to starting line (enter - for default: 0)
    nlines:
        number of lines (enter - for default: to end of file)
    roff:
        offset to starting range sample (enter - for default: 0)
    nsamp:
        number of range samples to extract (enter - for default: to end of line)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/multi_real'
    params = [data_in, OFF_par_in, data_out, OFF_par_out, rlks, azlks, loff, nlines, roff, nsamp]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def multi_SLC_WSS(SLC, SLC_par, MLI, MLI_par, logpath=None, outdir=None, shellscript=None):
    """
    | Calculate multi-look intensity image (MLI) from a ASAR Wide-Swath SLC
    | Copyright 2023, Gamma Remote Sensing v1.3 18-Apr-2023 clw/awi

    Parameters
    ----------
    SLC:
        (input) ASAR Wide-Swath SLC image
    SLC_par:
        (input) ASAR Wide-Swath SLC image parameter file
    MLI:
        (output) multi-look intensity image
    MLI_par:
        (output) MLI image parameter file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/multi_SLC_WSS'
    params = [SLC, SLC_par, MLI, MLI_par]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def neutron(intensity, flag, width, n_thres, ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate phase unwrapping neutrons using image intensity
    | Copyright 2023, Gamma Remote Sensing, v2.4 19-Apr-2023 clw/uw

    Parameters
    ----------
    intensity:
        (input) image intensity
    flag:
        (input) phase unwrapping flag file
    width:
        number of samples/row
    n_thres:
        neutron threshold, multiples of the average intensity (enter - for default: 6.0)
    ymin:
        offset to starting azimuth row (enter - for default: 0)
    ymax:
        offset to last azimuth row (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/neutron'
    params = [intensity, flag, width, n_thres, ymin, ymax]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_add(OFF_par1, OFF_par2, OFF_par3, logpath=None, outdir=None, shellscript=None):
    """
    | Add range and azimuth offset polynomial coefficients
    | Copyright 2008, Gamma Remote Sensing, v1.1 12-Feb-2008 clw

    Parameters
    ----------
    OFF_par1:
        (input) ISP offset/interferogram parameter file
    OFF_par2:
        (input) ISP offset/interferogram parameter file
    OFF_par3:
        (output) ISP offset/interferogram parameter file with sums of the
        range and azimuth offset polynomials in OFF_par1 and OFF_par2
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_add'
    params = [OFF_par1, OFF_par2, OFF_par3]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_fit(offs, ccp, OFF_par, coffs='-', coffsets='-', thres='-', npoly='-', interact_mode='-', logpath=None, outdir=None, shellscript=None):
    """
    | Range and azimuth offset polynomial estimation
    | Copyright 2023, Gamma Remote Sensing, v3.9 18-Apr-2023 clw/uw/cm

    Parameters
    ----------
    offs:
        (input) range and azimuth offset estimates for each patch (FCOMPLEX)
    ccp:
        (input) cross-correlation or SNR of each patch (FLOAT)
    OFF_par:
        (input) ISP offset/interferogram parameter file
    coffs:
        (output) culled range and azimuth offset estimates (FCOMPLEX, enter - for none)
    coffsets:
        (output) culled offset estimates and cross-correlation values (text format, enter - for none)
    thres:
        cross-correlation threshold (enter - for default from OFF_par)
    npoly:
        number of model polynomial parameters (enter - for default, 1, 3, 4, 6, default: 4)
    interact_mode:
        interactive culling of input data (enter - for default)
        * 0: off (default)
        * 1: on
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_fit'
    params = [offs, ccp, OFF_par, coffs, coffsets, thres, npoly, interact_mode]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_pwr(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin='-', azwin='-', offsets='-', n_ovr='-', nr='-', naz='-', thres='-', lanczos='-', bw_frac='-', deramp='-', int_filt='-', pflag='-', pltflg='-', ccs='-', logpath=None, outdir=None, shellscript=None):
    """
    | Offset estimation between SLC images using intensity cross-correlation
    | Copyright 2023, Gamma Remote Sensing, v5.8 clw/cm 18-Apr-2023

    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2:
        (input) single-look complex image 2
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    offs:
        (output) offset estimates in range and azimuth (FCOMPLEX)
    ccp:
        (output) cross-correlation of each patch (0.0->1.0) (FLOAT)
    rwin:
        range patch size (range pixels, enter - for default from offset parameter file)
    azwin:
        azimuth patch size (azimuth lines, enter - for default from offset parameter file)
    offsets:
        (output) range and azimuth offsets and cross-correlation data in text format, enter - for no output
    n_ovr:
        SLC oversampling factor (integer 2\\*\\*N (1,2,4), enter - for default: 2)
    nr:
        number of offset estimates in range direction (enter - for default from offset parameter file)
    naz:
        number of offset estimates in azimuth direction (enter - for default from offset parameter file)
    thres:
        cross-correlation threshold (0.0->1.0) (enter - for default from offset parameter file)
    lanczos:
        Lanczos interpolator order 5 -> 9 (enter - for default: 5)
    bw_frac:
        bandwidth fraction of low-pass filter on complex data (0.0->1.0) (enter - for default: 1.0)
    deramp:
        deramp SLC phase flag (enter - for default)
        * 0: no deramp (Doppler centroid close to 0) (default)
        * 1: deramp SLC phase
    int_filt:
        intensity low-pass filter flag (enter - for default)
        * 0: no filter
        * 1: low-pass filter of intensity data, highly recommended when no oversampling used (default)
    pflag:
        print flag (enter - for default)
        * 0: print offset summary (default)
        * 1: print all offset data
    pltflg:
        plotting flag (enter - for default)
        * 0: none (default)
        * 1: screen output
        * 2: screen output and PNG format plots
        * 3: output plots in PDF format
    ccs:
        (output) cross-correlation standard deviation of each patch (FLOAT) (enter - for none)
        * NOTE: ScanSAR and TOPS data need to be previously deramped
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr'
    params = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin, azwin,
              offsets, n_ovr, nr, naz, thres, lanczos, bw_frac, deramp,
              int_filt, pflag, pltflg, ccs]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_pwr_tracking(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin='-', azwin='-', offsets='-', n_ovr='-', thres='-', rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', lanczos='-', bw_frac='-', deramp='-', int_filt='-', pflag='-', pltflg='-', ccs='-', logpath=None, outdir=None, shellscript=None):
    """
    | Offset tracking between SLC images using intensity cross-correlation
    | Copyright 2023, Gamma Remote Sensing, v6.4 clw/cm 18-Apr-2023

    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2:
        (input) single-look complex image 2
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    offs:
        (output) offset estimates in range and azimuth (FCOMPLEX)
    ccp:
        (output) cross-correlation of each patch (0.0->1.0) (FLOAT)
    rwin:
        range patch size (range pixels, enter - for default from offset parameter file)
    azwin:
        azimuth patch size (azimuth lines, enter - for default from offset parameter file)
    offsets:
        (output) range and azimuth offsets and cross-correlation data in text format, enter - for no output
    n_ovr:
        SLC oversampling factor (integer 2\\*\\*N (1,2,4), enter - for default: 2)
    thres:
        cross-correlation threshold (0.0->1.0) (enter - for default from offset parameter file)
    rstep:
        step in range pixels (enter - for default: rwin/2)
    azstep:
        step in azimuth pixels (enter - for default: azwin/2)
    rstart:
        offset to starting range pixel (enter - for default: 0)
    rstop:
        offset to ending range pixel (enter - for default: nr-1)
    azstart:
        offset to starting azimuth line (enter - for default: 0)
    azstop:
        offset to ending azimuth line (enter - for default: nlines-1)
    lanczos:
        Lanczos interpolator order 5 -> 9 (enter - for default: 5)
    bw_frac:
        bandwidth fraction of low-pass filter on complex data (0.0->1.0) (enter - for default: 1.0)
    deramp:
        deramp SLC phase flag (enter - for default)
        * 0: no deramp (Doppler centroid close to 0) (default)
        * 1: deramp SLC phase
    int_filt:
        intensity low-pass filter flag (enter - for default)
        * 0: no filter
        * 1: low-pass filter of intensity data, highly recommended when no oversampling used (default)
    pflag:
        print flag (enter - for default)
        * 0: print offset summary (default)
        * 1: print all offset data
    pltflg:
        plotting flag (enter - for default)
        * 0: none (default)
        * 1: screen output
        * 2: screen output and PNG format plots
        * 3: output plots in PDF format
    ccs:
        (output) cross-correlation standard deviation of each patch (FLOAT) (enter - for none)
        * NOTE: ScanSAR and TOPS data need to be previously deramped
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr_tracking'
    params = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin, azwin,
              offsets, n_ovr, thres, rstep, azstep, rstart, rstop, azstart,
              azstop, lanczos, bw_frac, deramp, int_filt, pflag, pltflg, ccs]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_pwr_tracking2(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, OFF_par2='-', offs2='-', rwin='-', azwin='-', offsets='-', n_ovr='-', thres='-', rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', bw_frac='-', deramp='-', int_filt='-', pflag='-', pltflg='-', ccs='-', logpath=None, outdir=None, shellscript=None):
    """
    | Intensity cross-correlation offset tracking with the initial offset for each patch determined from input offset map
    | Copyright 2023, Gamma Remote Sensing, v2.1 clw/cm 18-Apr-2023

    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2:
        (input) single-look complex image 2
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    offs:
        (output) offset estimates in range and azimuth (FCOMPLEX)
    ccp:
        (output) cross-correlation of each patch (0.0->1.0) (FLOAT)
    OFF_par2:
        (input) ISP offset/interferogram parameter file of the offset map to determine initial offsets (enter - for none)
    offs2:
        (input) input range and azimuth offset map to determine initial offsets (enter - for none)
    rwin:
        range patch size (range pixels, enter - for default from offset parameter file)
    azwin:
        azimuth patch size (azimuth lines, enter - for default from offset parameter file)
    offsets:
        (output) range and azimuth offsets and cross-correlation data in text format, enter - for no output
    n_ovr:
        SLC oversampling factor (integer 2\\*\\*N (1,2,4), enter - for default: 2)
    thres:
        cross-correlation threshold (0.0->1.0) (enter - for default from offset parameter file)
    rstep:
        step in range pixels (enter - for default: rwin/2)
    azstep:
        step in azimuth pixels (enter - for default: azwin/2)
    rstart:
        offset to starting range pixel (enter - for default: 0)
    rstop:
        offset to ending range pixel (enter - for default: nr-1)
    azstart:
        offset to starting azimuth line (enter - for default: 0)
    azstop:
        offset to ending azimuth line (enter - for default: nlines-1)
    bw_frac:
        bandwidth fraction of low-pass filter on complex data (0.0->1.0) (enter - for default: 1.0)
    deramp:
        deramp SLC phase flag (enter - for default)
        * 0: no deramp (Doppler centroid close to 0) (default)
        * 1: deramp SLC phase
    int_filt:
        intensity low-pass filter flag (enter - for default)
        * 0: no filter
        * 1: low-pass filter of intensity data, highly recommended when no oversampling used (default)
    pflag:
        print flag (enter - for default)
        * 0: print offset summary (default)
        * 1: print all offset data
    pltflg:
        plotting flag (enter - for default)
        * 0: none (default)
        * 1: screen output
        * 2: screen output and PNG format plots
        * 3: output plots in PDF format
    ccs:
        (output) cross-correlation standard deviation of each patch (FLOAT) (enter - for none)
        * NOTE: ScanSAR and TOPS data need to be previously deramped
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr_tracking2'
    params = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, OFF_par2,
              offs2, rwin, azwin, offsets, n_ovr, thres, rstep, azstep,
              rstart, rstop, azstart, azstop, bw_frac, deramp, int_filt,
              pflag, pltflg, ccs]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_pwr_tracking_polygons(SLC_par, OFF_par, rlks, azlks, rwin, azwin, polygons, rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', rb='-', azb='-', logpath=None, outdir=None, shellscript=None):
    """
    | Offset tracking polygon calculation in MLI coordinates
    | Copyright 2023, Gamma Remote Sensing, v1.2 18-Apr-2023 cw

    Parameters
    ----------
    SLC_par:
        (input) reference SLC ISP image parameter file
    OFF_par:
        (input/output) ISP offset/interferogram parameter file
    rlks:
        range decimation factor for MLI geometry (enter - for default: 1)
    azlks:
        azimuth decimation factor for the MLI geometry (enter - for default: 1)
    rwin:
        range patch size (range pixels, enter - for default from offset parameter file)
    azwin:
        azimuth patch size (azimuth lines, enter - for default from offset parameter file)
    polygons:
        (output) polygon vertices in text format
    rstep:
        step in range pixels (enter - for default: rwin/2)
    azstep:
        step in azimuth pixels (enter - for default: azwin/2)
    rstart:
        offset to starting range pixel (enter - for default: 0)
    rstop:
        offset to ending range pixel (enter - for default: nr-1)
    azstart:
        offset to starting azimuth line (enter - for default: 0)
    azstop:
        offset to ending azimuth line (enter - for default: nlines-1)
    rb:
        polygon range border in MLI samples: (enter - for default: 7)
    azb:
        polygon azimuth border in MLI lines: (enter - for default: 7)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr_tracking_polygons'
    params = [SLC_par, OFF_par, rlks, azlks, rwin, azwin, polygons, rstep,
              azstep, rstart, rstop, azstart, azstop, rb, azb]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_SLC(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rwin='-', azwin='-', offsets='-', n_ovr='-', nr='-', naz='-', thres='-', ISZ='-', pflag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Offsets between SLC images using fringe visibility
    | Copyright 2023, Gamma Remote Sensing, v3.1 18-Apr-2023 clw

    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2:
        (input) single-look complex image 2
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    offs:
        (output) offset estimates (FCOMPLEX)
    snr:
        (output) offset estimation SNR (FLOAT)
    rwin:
        search window size (range pixels) (enter - for default from offset parameter file)
    azwin:
        search window size (azimuth lines) (enter - for default from offset parameter file)
    offsets:
        (output) range and azimuth offsets and SNR data in text format, enter - for no output
    n_ovr:
        SLC oversampling factor (integer 2\\*\\*N (1,2,4) enter - for default: 2)
    nr:
        number of offset estimates in range direction (enter - for default from offset parameter file)
    naz:
        number of offset estimates in azimuth direction (enter - for default from offset parameter file)
    thres:
        offset estimation quality threshold (enter - for default from offset parameter file)
    ISZ:
        search chip interferogram size (in non-oversampled pixels, enter - for default: 16)
    pflag:
        print flag (enter - for default)
        * 0: print offset summary (default)
        * 1: print all offset data
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_SLC'
    params = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rwin, azwin,
              offsets, n_ovr, nr, naz, thres, ISZ, pflag]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_SLC_tracking(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rsw='-', azsw='-', offsets='-', n_ovr='-', thres='-', rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', ISZ='-', pflag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Offset tracking between SLC images using fringe visibility
    | Copyright 2023, Gamma Remote Sensing, v3.8 18-Apr-2023 clw

    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2:
        (input) single-look complex image 2
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    offs:
        (output) offset estimates (FCOMPLEX)
    snr:
        (output) offset estimation SNR (FLOAT)
    rsw:
        range search window size (range pixels) (enter - for default from offset parameter file)
    azsw:
        azimuth search window size (azimuth lines) (enter - for default from offset parameter file)
    offsets:
        (output) range and azimuth offsets and SNR data in text format, enter - for no output
    n_ovr:
        SLC over-sampling factor (integer 2\\*\\*N (1,2,4) enter - for default: 2)
    thres:
        offset estimation quality threshold (enter - for default from offset parameter file)
    rstep:
        step in range pixels (enter - for default: rsw/2)
    azstep:
        step in azimuth pixels (enter - for default: azsw/2)
    rstart:
        starting range pixel (enter - for default: rsw/2)
    rstop:
        ending range pixel (enter - for default: nr - rsw/2)
    azstart:
        starting azimuth line (enter - for default: azsw/2)
    azstop:
        ending azimuth line (enter - for default: nlines - azsw/2)
    ISZ:
        search chip interferogram size (in non-oversampled pixels, enter - for default: 16)
    pflag:
        print flag (enter - for default)
        * 0: print offset summary (default)
        * 1: print all offset data
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_SLC_tracking'
    params = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rsw, azsw,
              offsets, n_ovr, thres, rstep, azstep, rstart, rstop, azstart,
              azstop, ISZ, pflag]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_sub(offs, OFF_par, offs_sub, logpath=None, outdir=None, shellscript=None):
    """
    | Subtraction of polynomial from range and azimuth offset estimates
    | Copyright 2017, Gamma Remote Sensing, v1.0 27-Mar-2017 cm

    Parameters
    ----------
    offs:
        (input) range and azimuth offset estimates (fcomplex)
    OFF_par:
        (input) ISP offset/interferogram parameter file
    offs_sub:
        (output) range and azimuth offset estimates after polynomial subtraction (fcomplex)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_sub'
    params = [offs, OFF_par, offs_sub]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_tracking(offs, ccp, SLC_par, OFF_par, disp_map, disp_val='-', mode='-', thres='-', poly_flag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Conversion of range and azimuth offsets files to displacement map
    | Copyright 2017, Gamma Remote Sensing, v2.0 4-Apr-2017 ts/clw/uw

    Parameters
    ----------
    offs:
        (input) range and azimuth offset estimates (fcomplex)
    ccp:
        (input) cross-correlation of the offset estimates (float)
    SLC_par:
        (input) SLC parameter file of reference SLC
    OFF_par:
        (input) offset parameter file used in the offset tracking
    disp_map:
        (output) range and azimuth displacement estimates (fcomplex)
    disp_val:
        (output) range and azimuth displacement estimates and cross-correlation values (enter - for none) (text)
    mode:
        flag indicating displacement mode:
        * 0: displacement in range and azimuth pixels
        * 1: displacement in meters in slant range and azimuth directions
        * 2: displacement in meters in ground range and azimuth directions (default)
    thres:
        cross-correlation threshold to accept offset value (default from OFF_par)
    poly_flag:
        flag indicating if trend calculated using offset polynomials from OFF_par is subtracted:
        * 0: do not subtract polynomial trend from offset data
        * 1: subtract polynomial trend from offset data (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/offset_tracking'
    params = [offs, ccp, SLC_par, OFF_par, disp_map, disp_val, mode, thres, poly_flag]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ORB_filt(SLC_par_in, SLC_par_out, interval='-', extra='-', logpath=None, outdir=None, shellscript=None):
    """
    | Filter state vectors using a least-squares polynomial model
    | Copyright 2020, Gamma Remote Sensing, v1.3 20-May-2020 clw/cm

    Parameters
    ----------
    SLC_par_in:
        (input) ISP image parameter file at least 5 state vectors
    SLC_par_out:
        (output) ISP image parameter file with state vectors filtered using least-squares
    interval:
        time interval between state vectors (enter - for default: state vector time interval in SLC_par)
    extra:
        extra time for state vectors at start and end of image (sec.) (enter - for default: 5.0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/ORB_filt'
    params = [SLC_par_in, SLC_par_out, interval, extra]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ORB_prop_SLC(SLC_par, nstate='-', interval='-', extra='-', mode='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate state vectors using orbit propagation and interpolation
    | Copyright 2022, Gamma Remote Sensing, v2.0 1-Feb-2022 clw/awi/cm

    Parameters
    ----------
    SLC_par:
        (input) ISP image parameter file with at least 1 state vector
    nstate:
        number of state vectors to calculate (enter - for default: nstate from image duration + extra)
    interval:
        time interval between state vectors (enter - for default: state vector time interval in SLC_par)
    extra:
        extra time for state vectors at start and end of image (sec.) (enter - for default: 30.0)
    mode:
        orbit propagation mode:
        * 0: polynomial interpolation (default, if 3 or more state vectors available)
        * 1: integration of the equations of motion (default, if less than 3 state vectors available)
        * 2: interpolate between state vectors, minimum of 3 state vectors;
          interpolation of the equations of motion outside of the time span of the existing state vectors
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/ORB_prop_SLC'
    params = [SLC_par, nstate, interval, extra, mode]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ORRM_vec(SLC_par, ORRM, nstate='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate state vectors extraction from ORRM file
    | Copyright 2023, Gamma Remote Sensing, v1.5 19-Apr-2023 clw

    Parameters
    ----------
    SLC_par:
        (input/output) ISP SLC/MLI image parameter file
    ORRM:
        (input) ORRM state vector file
    nstate:
        number of state vectors (enter - for default: 5, maximum: 1024)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/ORRM_vec'
    params = [SLC_par, ORRM, nstate]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ACS_ERS(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file generation for ERS SLC data from the ACS processor
    | Copyright 2020, Gamma Remote Sensing, v1.4 3-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) ERS CEOS SAR leader file
    SLC_par:
        (output) ISP SLC parameter file (example .slc.par)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ACS_ERS'
    params = [CEOS_SAR_leader, SLC_par]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ASAR(ASAR_ERS_file, output_name, K_dB='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract SLC/MLI image parameters and images from ENVISAT ASAR SLC, WSS, APP, and PRI products
    | Copyright 2023, Gamma Remote Sensing, v2.9 20-Oct-2023 clw/uw/awi/cm

    Parameters
    ----------
    ASAR_ERS_file:
        (input) ASAR or ERS data in ASAR format (SAR_IMS_1P) including header and image as provided by ESA
    output_name:
        (output) common part of output file names (e.g. YYYMMDD date)
    K_dB:
        Calibration factor in dB (nominal value for all ASAR modes: 55.0)
        * NOTE: Use - to use the calibration factor provided in the ASAR file header
        * NOTE: In the case that a calibration factor is specified on the command line, PRI images are converted
          to radiometrically calibrated ground-range intensity images in float format
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ASAR'
    params = [ASAR_ERS_file, output_name, K_dB]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ASF_91(CEOS_leader, CEOS_trailer, SLC_par, logpath=None, outdir=None, shellscript=None):
    """
    | SLC parameter file for data from the Alaska SAR Facility (1991-1996)
    | Copyright 2020, Gamma Remote Sensing, v3.4 3-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_leader:
        (input) ASF CEOS leader file
    CEOS_trailer:
        (input) ASF CEOS trailer file
    SLC_par:
        (output) ISP SLC image parameter file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_91'
    params = [CEOS_leader, CEOS_trailer, SLC_par]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ASF_96(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for ASF data 1996-->present v1.1
    | Copyright 2020, Gamma Remote Sensing, v1.4 3-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) CEOS SAR leader file
    SLC_par:
        (output) ISP SLC parameter file (example .slc.par)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_96'
    params = [CEOS_SAR_leader, SLC_par]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ASF_PRI(CEOS_leader, CEOS_data, GRD_par, GRD, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for ASF detected ground range images (L1) Sep 1996 --> present
    | Copyright 2021, Gamma Remote Sensing, v1.5 14-Jun-2021 clw/uw/cm

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS leader file
    CEOS_data:
        (input) CEOS data file binary
    GRD_par:
        (output) ISP ground range image parameter file
    GRD:
        (output) ISP ground range image (enter - for none, FLOAT intensity)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_PRI'
    params = [CEOS_leader, CEOS_data, GRD_par, GRD]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ASF_RSAT_SS(CEOS_leader, CEOS_data, GRD_par, GRD, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for ASF Radarsat-1 SCANSAR images
    | Copyright 2020, Gamma Remote Sensing, v1.1 3-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS leader file (Radarsat-1 SCANSAR)
    CEOS_data:
        (input) CEOS data file (Radarsat-1 SCANSAR)
    GRD_par:
        (output) ISP image parameter file (example .mli.par)
    GRD:
        (output) ISP image (example .mli) (enter - for none, short integer)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_RSAT_SS'
    params = [CEOS_leader, CEOS_data, GRD_par, GRD]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ASF_SLC(CEOS_leader, SLC_par, CEOS_data='-', SLC='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC image parameter file and reformat data
    | Copyright 2023, Gamma Remote Sensing, v1.1 18-Apr-2023 clw/uw

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS SAR leader file
    SLC_par:
        (output) ISP SLC parameter file (example .slc.par)
    CEOS_data:
        (input) CEOS data file (example: dat_01.001) (enter - for none)
    SLC:
        (output) SLC data with file and line headers removed (example: .slc) (enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the executable path and its positional arguments, then dispatch
    bin_path = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_SLC'
    params = [CEOS_leader, SLC_par, CEOS_data, SLC]
    process([bin_path] + params, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ASNARO2(CEOS_data, CEOS_leader, SLC_par, SLC='-', reramp='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for ASNARO-2 Spotlight, Stripmap and ScanSAR level 1.1 data
    | Copyright 2023, Gamma Remote Sensing, v1.4 15-Jun-2023 cm/uw

    Parameters
    ----------
    CEOS_data:
        (input) the CEOS format SLC data (IMG-PP-AS2\\*)
    CEOS_leader:
        (input) the CEOS SAR leader file of the ASNARO-2 acquisition (LED-AS2\\*)
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd_pp.slc.par)
    SLC:
        (output) the SLC (Spotlight/Stripmap) or SLI (ScanSAR) data file (- for none; e.g. yyyymmdd_pp.slc)
    reramp:
        flag controlling SLC phase reramping (- for default)
        * 0: no reramp
        * 1: reramp SLC phase (default)
        * NOTE: ASNARO2 geocoded and georeferenced data in GeoTIFF format (level 1.5) can be read using par_ASNARO2_geo program.
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ASNARO2'
    process([executable, CEOS_data, CEOS_leader, SLC_par, SLC, reramp],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ATLSCI_ERS(CEOS_SAR_leader, CEOS_Image, SLC_par, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for ATL-SCI ERS SLC data
    | Copyright 2020, Gamma Remote Sensing, v2.9 21-Sep-2020 clw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) the CEOS SAR leader file (LEA_01.001)
    CEOS_Image:
        (input) the CEOS image data segment (DAT_01.001)
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. .slc.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ATLSCI_ERS'
    process([executable, CEOS_SAR_leader, CEOS_Image, SLC_par],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_Capella_SLC(GeoTIFF, ext_JSON, SLC_par, SLC='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for Capella SLC data
    | Copyright 2025, Gamma Remote Sensing, v2.0 28-Apr-2025 cm

    Parameters
    ----------
    GeoTIFF:
        (input) the Capella image data file in GeoTIFF format (\\*.tif)
    ext_JSON:
        (input) the Capella extended metadata file in JSON format (\\*_extended.json)
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd.slc.par)
    SLC:
        (output) the SLC data file to be written (- for none; e.g. yyyymmdd.slc)
    radcal:
        radiometric calibration flag (- for default)
        * 0: beta0 (default)
        * 1: sigma0
    noise:
        flag controlling the use of noise levels (- for default)
        * 0: do not use noise levels (default)
        * 1: use noise levels
        * NOTE: Capella terrain geocoded data in GeoTIFF format can be read using par_Capella_geo program
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_Capella_SLC'
    process([executable, GeoTIFF, ext_JSON, SLC_par, SLC, radcal, noise],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_CS_DGM(HDF5, trunk, logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP MLI parameter and image files for COSMO-Skymed DGM data
    | Copyright 2024, Gamma Remote Sensing, v1.1 12-Sep-2024 cm/awi/ms/cw/uw

    Parameters
    ----------
    HDF5:
        (input) the COSMO-Skymed DGM data file in HDF5 format
    trunk:
        (output) the file name trunk from which the output filenames are derived
        (e.g. yyyymmdd -> yyyymmdd_pol.mli yyyymmdd_pol.mli.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_CS_DGM'
    process([executable, HDF5, trunk],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_CS_SLC(HDF5, trunk, logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP SLC parameter and image files for Cosmo-Skymed SCS data
    | Copyright 2024, Gamma Remote Sensing, v2.2 12-Sep-2024 awi/ms/cw/uw

    Parameters
    ----------
    HDF5:
        (input) the SCS data file in HDF5 format
    trunk:
        (output) the file name trunk from which the output filenames are derived
        (e.g. yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_CS_SLC'
    process([executable, HDF5, trunk],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_CS_SLC_TIF(GeoTIFF, XML, trunk, logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP SLC parameter and image files for Cosmo Skymed SCS data in GeoTIFF format
    | Copyright 2023, Gamma Remote Sensing, v1.6 16-May-2023 awi/ms/clw/cm

    Parameters
    ----------
    GeoTIFF:
        (input) the SCS data file in GeoTIFF format
    XML:
        (input) the SCS metadata file in XML format
    trunk:
        (output) the file name trunk from which the output filenames are derived
        (e.g. yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_CS_SLC_TIF'
    process([executable, GeoTIFF, XML, trunk],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_CSG_DGM(HDF5, trunk, logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP MLI parameter and image files for COSMO-Skymed Second Generation DGM data
    | Copyright 2024, Gamma Remote Sensing, v1.1 12-Sep-2024 cm/awi/ms/cw/uw

    Parameters
    ----------
    HDF5:
        (input) the COSMO-Skymed Second Generation DGM data file in HDF5 format
    trunk:
        (output) the file name trunk from which the output filenames are derived
        (e.g. yyyymmdd -> yyyymmdd_pol.mli yyyymmdd_pol.mli.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_CSG_DGM'
    process([executable, HDF5, trunk],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_CSG_SLC(HDF5, trunk, logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP SLC parameter and image files for COSMO-Skymed Second Generation SCS data
    | Copyright 2024, Gamma Remote Sensing, v1.3 12-Sep-2024 cm/awi/ms/cw/uw

    Parameters
    ----------
    HDF5:
        (input) the SCS data file in HDF5 format
    trunk:
        (output) the file name trunk from which the output filenames are derived
        (e.g. yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_CSG_SLC'
    process([executable, HDF5, trunk],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_EORC_JERS_SLC(CEOS_SAR_leader, SLC_par, CEOS_data='-', SLC='-', logpath=None, outdir=None, shellscript=None):
    """
    | Reformat EORC processed JERS-1 SLC and generate the ISP parameter file
    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) the CEOS SAR leader file of the JERS SLC processed by EORC
    SLC_par:
        (output) the ISP image parameter file to be written
    CEOS_data:
        (input) the CEOS format SLC data (IMOP_01.DAT; - for none)
    SLC:
        (output) the reformatted JERS SLC (e.g. yyyymmdd.SLC; - for none)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_EORC_JERS_SLC'
    process([executable, CEOS_SAR_leader, SLC_par, CEOS_data, SLC],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_EORC_PALSAR(CEOS_leader, SLC_par, CEOS_data, SLC='-', dtype='-', sc_dB='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC image and parameter files for PALSAR, PALSAR-2, and PALSAR-3 level 1.1 SLC data produced by EORC/JAXA and ESA
    | Copyright 2025, Gamma Remote Sensing, v3.9 12-Jun-2025 clw/cm

    Parameters
    ----------
    CEOS_leader:
        (input) the CEOS leader file of the PALSAR, PALSAR-2, or PALSAR-3 Level 1.1 SLC data (LED...)
    SLC_par:
        (output) the ISP image parameter file to be written (e.g. yyyymmdd.slc.par)
    CEOS_data:
        (input) the PALSAR CEOS format Level 1.1 SLC (IMG...)
    SLC:
        (output) the reformatted PALSAR SLC (e.g. yyyymmdd.slc; - for none)
    dtype:
        output data type (- for default)
        * 0: FCOMPLEX (default)
        * 1: SCOMPLEX
    sc_dB:
        scale factor for the FCOMPLEX -> SCOMPLEX conversion (- for default: HH,VV (dB): 60.0000, VH,HV: 70.0000)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_EORC_PALSAR'
    process([executable, CEOS_leader, SLC_par, CEOS_data, SLC, dtype, sc_dB],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_EORC_PALSAR_ScanSAR(CEOS_data, CEOS_leader, SLC_par, SLC='-', TOPS_par='-', afmrate='-', shift='-', reramp='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files from EORC PALSAR2 ScanSAR burst SLC data in CEOS format
    | Copyright 2023, Gamma Remote Sensing, v1.4 18-Apr-2023 cm/clw

    Parameters
    ----------
    CEOS_data:
        (input) the CEOS image file of a PALSAR2 ScanSAR burst data subswath (IMG...)
    CEOS_leader:
        (input) the CEOS leader file of the PALSAR2 ScanSAR burst data (LED...)
    SLC_par:
        (output) the ISP image parameter file to be written (e.g. yyyymmdd_b1_hh.slc.par)
    SLC:
        (output) the SLC data file to be written (- for none; e.g. yyyymmdd_b1_hh.slc)
    TOPS_par:
        (output) the SLC burst annotation file to be written (- for none; e.g. yyyymmdd_b1_hh.slc.tops_par)
    afmrate:
        azimuth FM rate estimation method (- for default)
        * 0: beam velocity on the ground
        * 1: platform velocity (default)
    shift:
        whether to shift the azimuth spectrum by fs/2 (- for default)
        * 0: no
        * 1: yes (default)
    reramp:
        whether to reramp the data using the Doppler centroid and azimuth FM rate estimate (- for default)
        * 0: no
        * 1: yes (default)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_EORC_PALSAR_ScanSAR'
    process([executable, CEOS_data, CEOS_leader, SLC_par, SLC, TOPS_par, afmrate, shift, reramp],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ERSDAC_PALSAR(ERSDAC_SLC_par, SLC_par, logpath=None, outdir=None, shellscript=None):
    """
    | Generate the ISP image parameter file from ERSDAC PALSAR level 1.1 SLC data
    | Copyright 2023, Gamma Remote Sensing, v1.7 5-Jun-2023 clw

    Parameters
    ----------
    ERSDAC_SLC_par:
        (input) the ERSDAC SLC parameter file Level 1.1 (PASL11\\*.SLC.par)
    SLC_par:
        (output) the ISP image parameter file to be written (e.g. yyyymmdd.slc.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ERSDAC_PALSAR'
    process([executable, ERSDAC_SLC_par, SLC_par],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ESA_ERS(CEOS_SAR_leader, SLC_par, inlist, CEOS_DAT='-', SLC='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file generation for ERS SLC data from the PGS, VMP, and SPF processors
    | Copyright 2020, Gamma Remote Sensing, v1.5 21-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) the ERS CEOS SAR leader file
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. .slc.par)
    inlist:
        the arguments that are piped to the command via stdin
    CEOS_DAT:
        (input) the CEOS data file (e.g. DAT_01.001)
    SLC:
        (output) the SLC data stripped of file and line headers (e.g. .slc)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ESA_ERS'
    # inlist is forwarded separately so that process() can feed it to stdin
    process([executable, CEOS_SAR_leader, SLC_par, CEOS_DAT, SLC],
            logpath=logpath, outdir=outdir, inlist=inlist, shellscript=shellscript)
def par_ESA_JERS_SEASAT_SLC(CEOS_data, CEOS_leader, SLC_par, SLC='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for ESA-provided JERS and SEASAT SLC data
    | Copyright 2023, Gamma Remote Sensing, v1.4 15-Jun-2023 cm/clw/ts

    Parameters
    ----------
    CEOS_data:
        (input) the CEOS format SLC data (DAT_01.001)
    CEOS_leader:
        (input) the CEOS SAR leader file of the JERS SLC processed by ESA (LEA_01.001)
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd.slc.par)
    SLC:
        (output) the SLC data file to be written (- for none; e.g. yyyymmdd.slc)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ESA_JERS_SEASAT_SLC'
    process([executable, CEOS_data, CEOS_leader, SLC_par, SLC],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ESA_PALSAR_GDH(CEOS_data, CEOS_leader, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI and GRD image and parameter files for PALSAR + PALSAR2 level 1.5 GDH data provided by ESA
    | Copyright 2023, Gamma Remote Sensing, v1.4 5-Jun-2023 clw/cm

    Parameters
    ----------
    CEOS_data:
        (input) the CEOS image file of the PALSAR or PALSAR-2 Level 1.5 GDH data (IMG...)
    CEOS_leader:
        (input) the CEOS leader file of the PALSAR or PALSAR-2 Level 1.5 GDH data (LED...)
    MLI_par:
        (output) the MLI parameter file to be written (e.g. yyyymmdd_pp.mli.par)
    MLI:
        (output) the MLI data file in slant range geometry (e.g. yyyymmdd_pp.mli; - for none)
    GRD_par:
        (output) the GRD parameter file to be written (e.g. yyyymmdd_pp.grd.par; - for none)
    GRD:
        (output) the GRD data file to be written (e.g. yyyymmdd_pp.grd; - for none)
    rps:
        slant range pixel spacing (m) (- for default: derived from the ground-range parameters)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ESA_PALSAR_GDH'
    process([executable, CEOS_data, CEOS_leader, MLI_par, MLI, GRD_par, GRD, rps],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_Fucheng_SLC(GeoTIFF, annotation_XML, calibration_XML, noise_XML, SLC_par, SLC='-', dtype='-', radcal='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for Spacety Fucheng SLC data
    | Copyright 2024, Gamma Remote Sensing, v1.1 7-Jun-2024 cm/clw/awi

    Parameters
    ----------
    GeoTIFF:
        (input) the image data file in \\*.tiff GeoTIFF format (- for default: none)
    annotation_XML:
        (input) the Fucheng XML annotation file
    calibration_XML:
        (input) the Fucheng radiometric calibration XML file used to generate output as sigma0
        (- for default: return uncalibrated digital numbers)
    noise_XML:
        (input) the Fucheng noise XML file (- for default: no subtraction of thermal noise power)
    SLC_par:
        (output) the SLC parameter file to be written (e.g. yyyymmdd_vv.slc.par)
    SLC:
        (output) the SLC data file to be written (- for default: none; e.g. yyyymmdd_vv.slc)
    dtype:
        output data type (- for default)
        * 0: FCOMPLEX (default)
        * 1: SCOMPLEX
    radcal:
        radiometric calibration flag (- for default)
        * 0: none
        * 1: Beta Nought
        * 2: Sigma Nought (default)
        * 3: Gamma
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_Fucheng_SLC'
    process([executable, GeoTIFF, annotation_XML, calibration_XML, noise_XML, SLC_par, SLC, dtype, radcal],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_GF3_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter file and SLC image from a Gaofen-3 data set in GeoTIFF format
    | Copyright 2023, Gamma Remote Sensing, v1.3 14-Jun-2023 cm

    Parameters
    ----------
    GeoTIFF:
        (input) the Gaofen-3 data file in GeoTIFF format (\\*.tiff) (- for none)
    annotation_XML:
        (input) the Gaofen-3 annotation file in XML format (\\*.meta.xml)
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd.slc.par)
    SLC:
        (output) the ISP SLC data file (e.g. yyyymmdd.slc) (- for none, no SLC output is produced)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_GF3_SLC'
    process([executable, GeoTIFF, annotation_XML, SLC_par, SLC],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_HISEA1_SLC(GeoTIFF, annotation_XML, calibration_XML, SLC_par, SLC='-', dtype='-', sc_dB='-', shift='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for Hisea-1 SLC data
    | Copyright 2023, Gamma Remote Sensing, v1.4 11-May-2023 awi/cm

    Parameters
    ----------
    GeoTIFF:
        (input) the image data file in GeoTIFF format (- for none; \\*.tiff)
    annotation_XML:
        (input) the Hisea-1 L1 XML annotation file
    calibration_XML:
        (input) the Hisea-1 L1 radiometric calibration XML file (- for no radiometric calibration)
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd_vv.slc.par)
    SLC:
        (output) the SLC data file to be written (- for none; e.g. yyyymmdd_vv.slc)
    dtype:
        output data type (- for default)
        * 0: FCOMPLEX (default)
        * 1: SCOMPLEX
    sc_dB:
        scale factor for the FCOMPLEX -> SCOMPLEX conversion (- for default: HH,VV (dB): 60.0000, VH,HV: 70.0000)
    shift:
        whether to shift the azimuth spectrum by fs/2 (- for default)
        * 0: no
        * 1: yes (default)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_HISEA1_SLC'
    process([executable, GeoTIFF, annotation_XML, calibration_XML, SLC_par, SLC, dtype, sc_dB, shift],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_HT1_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', dtype='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for HT1 / Hongtu-1 / PIESAT-1 SLC data
    | Copyright 2024, Gamma Remote Sensing, v1.0 5-Jun-2024 cm/clw/awi

    Parameters
    ----------
    GeoTIFF:
        (input) the image data file in \\*.tiff GeoTIFF format (- for default: none)
    annotation_XML:
        (input) the HT1 XML annotation file
    SLC_par:
        (output) the SLC parameter file to be written (e.g. yyyymmdd_vv.slc.par)
    SLC:
        (output) the SLC data file to be written (- for default: none; e.g. yyyymmdd_vv.slc)
    dtype:
        output data type (- for default)
        * 0: FCOMPLEX (default)
        * 1: SCOMPLEX
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_HT1_SLC'
    process([executable, GeoTIFF, annotation_XML, SLC_par, SLC, dtype],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ICEYE_GRD(GeoTIFF, XML, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI and GRD image and parameter files for ICEYE GRD data
    | Copyright 2024, Gamma Remote Sensing, v1.4 13-Jun-2024 cm

    Parameters
    ----------
    GeoTIFF:
        (input) the ICEYE GRD data file in GeoTIFF format (- for none; \\*.tif)
    XML:
        (input) the ICEYE XML annotation file
    MLI_par:
        (output) the MLI parameter file to be written (e.g. yyyymmdd.mli.par)
    MLI:
        (output) the MLI data file in slant range geometry (e.g. yyyymmdd.mli; - for none)
    GRD_par:
        (output) the GRD parameter file to be written (e.g. yyyymmdd.grd.par; - for none)
    GRD:
        (output) the GRD data file to be written (e.g. yyyymmdd.grd; - for none)
    rps:
        slant range pixel spacing (m) (- for default: derived from the ground-range parameters)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ICEYE_GRD'
    process([executable, GeoTIFF, XML, MLI_par, MLI, GRD_par, GRD, rps],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_ICEYE_SLC(HDF5, SLC_par, SLC='-', dtype='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP SLC parameter and binary files for ICEYE SLC data
    | Copyright 2024, Gamma Remote Sensing, v1.9 28-Oct-2024 cm

    Parameters
    ----------
    HDF5:
        (input) the ICEYE SLC data file in HDF5 format
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd.slc.par)
    SLC:
        (output) the SLC data file to be written (- for none; e.g. yyyymmdd.slc)
    dtype:
        output data type (- for default: same as input)
        * 0: FCOMPLEX
        * 1: SCOMPLEX
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_ICEYE_SLC'
    process([executable, HDF5, SLC_par, SLC, dtype],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_IECAS_SLC(aux_data, slc_Re, slc_Im, date, SLC_par, SLC, logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for IECAS SLC data
    | Copyright 2023, Gamma Remote Sensing, v1.3 18-Apr-2023

    Parameters
    ----------
    aux_data:
        (input) the IECAS SAR auxiliary data (POS\\*.dat)
    slc_Re:
        (input) the real part of the complex SLC data
    slc_Im:
        (input) the imaginary part of the complex SLC data
    date:
        (input) the acquisition date in format YYYYMMDD (e.g. 20110121), taken from the aux_data filename
    SLC_par:
        (output) the ISP SLC parameter file to be written
    SLC:
        (output) the SLC image to be written
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_IECAS_SLC'
    process([executable, aux_data, slc_Re, slc_Im, date, SLC_par, SLC],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_KC_PALSAR_slr(facter_m, CEOS_leader, SLC_par, pol, pls_mode, KC_data, pwr='-', fdtab='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP parameter file, Doppler table, and images for PALSAR KC Slant-Range data
    | Copyright 2023, Gamma Remote Sensing, v2.3 5-Jun-2023 ms/awi/clw/cm

    Parameters
    ----------
    facter_m:
        (input) the PALSAR Kyoto-Carbon parameter file
    CEOS_leader:
        (input) the PALSAR Kyoto-Carbon leader file (LED)
    SLC_par:
        (output) the ISP image parameter file to be written (e.g. yyyymmdd_pp.mli.par)
    pol:
        the polarization, e.g. HH or HV
    pls_mode:
        the PALSAR acquisition mode:
        * 1: Fine Beam Single
        * 2: Fine Beam Double
        * 3: Wide Beam
    KC_data:
        (input) the PALSAR Kyoto-Carbon data (named sar_Q\\*.dat_\\*)
    pwr:
        (output) the PALSAR Kyoto-Carbon data strip expressed as SAR intensity (- for none; e.g. yyyymmdd_pp.mli)
    fdtab:
        (output) the table of output polynomials, one polynomial/block, used as input to gc_map_fd (- for none)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_KC_PALSAR_slr'
    process([executable, facter_m, CEOS_leader, SLC_par, pol, pls_mode, KC_data, pwr, fdtab],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_KS_DGM(HDF5, trunk, logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP SLC parameter and PRI image files for Kompsat DGM data
    | Copyright 2023, Gamma Remote Sensing, v1.4 13-Jul-2023 awi/cm

    Parameters
    ----------
    HDF5:
        (input) the DGM data file in HDF5 format
    trunk:
        (output) the file name trunk from which the output filenames are derived
        (e.g. yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.pri.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_KS_DGM'
    process([executable, HDF5, trunk],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_KS_SLC(HDF5, trunk, logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP SLC parameter and image files for Kompsat SCS data
    | Copyright 2023, Gamma Remote Sensing, v1.7 13-Jul-2023 awi/clw/cm

    Parameters
    ----------
    HDF5:
        (input) the SCS data file in HDF5 format
    trunk:
        (output) the file name trunk from which the output filenames are derived
        (e.g. yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_KS_SLC'
    process([executable, HDF5, trunk],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_LT1_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', dtype='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter file and SLC image from a LT-1 data set
    | Copyright 2024, Gamma Remote Sensing, v1.3 17-Jul-2024 awi/cm

    Parameters
    ----------
    GeoTIFF:
        (input) the image data file in GeoTIFF format (- for none; \\*.tiff)
    annotation_XML:
        (input) the LT-1 product annotation XML file (\\*.meta.xml)
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd.slc.par)
    SLC:
        (output) the SLC data file, e.g. yyyymmdd.slc (- for none, no SLC output is produced)
    dtype:
        output data type (- for default)
        * 0: FCOMPLEX (default)
        * 1: SCOMPLEX
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_LT1_SLC'
    process([executable, GeoTIFF, annotation_XML, SLC_par, SLC, dtype],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_MSP(SAR_par, PROC_par, SLC_par, image_format='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP image parameter file from MSP processing parameter and sensor files
    | Copyright 2024, Gamma Remote Sensing, v3.7 8-May-2024 clw/uw/of

    Parameters
    ----------
    SAR_par:
        (input) the MSP SAR sensor parameter file
    PROC_par:
        (input) the MSP processing parameter file
    SLC_par:
        (output) the ISP SLC/MLI image parameter file to be written
    image_format:
        image format flag (- for default: taken from the MSP processing parameter file)
        * 0: FCOMPLEX (pairs of 4-byte float)
        * 1: SCOMPLEX (pairs of 2-byte short integer)
        * 2: FLOAT (4-bytes/value)
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_MSP'
    process([executable, SAR_par, PROC_par, SLC_par, image_format],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_NISAR_RSLC(HDF5, root_name, radcal='-', noise='-', band='-', freq='-', pol='-', out_flag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate ISP SLC parameter and image files for NISAR Level-1 RSLC data
    | Copyright 2025, Gamma Remote Sensing, v1.4 19-May-2025 cm

    Parameters
    ----------
    HDF5:
        (input) the NISAR RSLC data file in HDF5 format (Level-1 Range Doppler Single Look Complex)
    root_name:
        (output) the root name from which the generated output files are derived (e.g. yyyymmdd)
    radcal:
        radiometric calibration flag (- for default)
        * 0: none
        * 1: beta0
        * 2: sigma0 (default)
        * 3: gamma0
    noise:
        noise subtraction via the noise equivalent backscatter look-up table (- for default)
        * 0: do not apply noise subtraction (default)
        * 1: apply noise subtraction
    band:
        the radar band, L or S (- for default: all available radar bands)
    freq:
        the frequency, A or B, in case of split imaging bands (- for default: all available frequencies)
    pol:
        the polarization: HH, HV, RH, RV, VH, or VV (- for default: all available polarizations)
    out_flag:
        output flag (- for default)
        * 0: write data and parameter files (default)
        * 1: only write parameter files
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_NISAR_RSLC'
    process([executable, HDF5, root_name, radcal, noise, band, freq, pol, out_flag],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_NovaSAR_GRD(GeoTIFF, XML, polarization, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI and GRD image and parameter files for NovaSAR GRD and SCD data
    | Copyright 2023, Gamma Remote Sensing, v1.8 3-Mar-2023 cm

    Parameters
    ----------
    GeoTIFF:
        (input) the NovaSAR image data file in GeoTIFF format (- for none; \\*.tif)
    XML:
        (input) the NovaSAR XML annotation file
    polarization:
        the image polarization: HH, VV, HV, VH, CH, CV
    MLI_par:
        (output) the MLI parameter file to be written (e.g. yyyymmdd_pp.mli.par)
    MLI:
        (output) the MLI data file in slant range geometry (e.g. yyyymmdd_pp.mli; - for none)
    GRD_par:
        (output) the GRD parameter file to be written (e.g. yyyymmdd_pp.grd.par; - for none)
    GRD:
        (output) the GRD data file to be written (e.g. yyyymmdd_pp.grd; - for none)
    rps:
        slant range pixel spacing (m) (- for default: derived from the ground-range parameters)
    radcal:
        radiometric calibration flag (- for default)
        * 0: beta0 (default)
        * 1: sigma0
    noise:
        flag controlling the use of noise levels (- for default)
        * 0: do not use noise levels (default)
        * 1: use noise levels
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_NovaSAR_GRD'
    process([executable, GeoTIFF, XML, polarization, MLI_par, MLI, GRD_par, GRD, rps, radcal, noise],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_NovaSAR_SLC(GeoTIFF, XML, polarization, SLC_par, SLC='-', dtype='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for NovaSAR SLC data
    | Copyright 2023, Gamma Remote Sensing, v1.6 3-Mar-2023 cm

    Parameters
    ----------
    GeoTIFF:
        (input) the NovaSAR image data file in GeoTIFF format (- for none; \\*.tif)
    XML:
        (input) the NovaSAR XML annotation file
    polarization:
        the image polarization: HH, VV, HV, VH, CH, CV
    SLC_par:
        (output) the ISP SLC parameter file to be written (e.g. yyyymmdd_pp.slc.par)
    SLC:
        (output) the SLC data file to be written (- for none; e.g. yyyymmdd_pp.slc)
    dtype:
        output data type (- for default: same as input)
        * 0: FCOMPLEX
        * 1: SCOMPLEX
    radcal:
        radiometric calibration flag (- for default)
        * 0: beta0 (default)
        * 1: sigma0
    noise:
        flag controlling the use of noise levels (- for default)
        * 0: do not use noise levels (default)
        * 1: use noise levels
    logpath: str or None
        directory in which to store the command logfile
    outdir: str or None
        directory in which the command is executed
    shellscript: str or None
        name of a shell script to which the Gamma command is appended
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_NovaSAR_SLC'
    process([executable, GeoTIFF, XML, polarization, SLC_par, SLC, dtype, radcal, noise],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_NovaSAR_SRD(GeoTIFF, XML, polarization, MLI_par, MLI='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI image and parameter files for NovaSAR SRD data
    | Copyright 2023, Gamma Remote Sensing, v1.3 3-Mar-2023 cm

    Parameters
    ----------
    GeoTIFF:
        (input) NovaSAR image data file in GeoTIFF format (enter - for none, \\*.tif)
    XML:
        (input) NovaSAR XML annotation file
    polarization:
        image polarization: HH, VV, HV, VH, CH, CV
    MLI_par:
        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)
    MLI:
        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)
    radcal:
        radiometric calibration flag (enter - for default)
        * 0: beta0 (default)
        * 1: sigma0
    noise:
        noise levels flag (enter - for default)
        * 0: do not use noise levels (default)
        * 1: use noise levels
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_NovaSAR_SRD'
    args = [pgm, GeoTIFF, XML, polarization, MLI_par,
            MLI, radcal, noise]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_PRI(CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file generation for ERS PRI data from the PGS and VMP processors
    | Copyright 2020, Gamma Remote Sensing, v1.7 21-Sep-2020 clw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) ERS CEOS SAR leader file for PRI product
    PRI_par:
        (output) ISP image parameter file (example: .pri.par)
    CEOS_DAT:
        (input) CEOS data file (example: DAT_01.001)
    PRI:
        (output) PRI data with file and line headers removed (example: .pri)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_PRI'
    args = [pgm, CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_PRI_ESRIN_JERS(CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI, logpath=None, outdir=None, shellscript=None):
    """
    | ISP GRD parameter file for ESRIN processed JERS PRI data
    | Copyright 2020, Gamma Remote Sensing, v1.9 21-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) ERS CEOS SAR leader file for PRI product
    PRI_par:
        (output) ISP image parameter file (example: .pri.par)
    CEOS_DAT:
        (input) CEOS data file (example: DAT_01.001)
    PRI:
        (output) PRI data with file and line headers removed (example: .pri)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_PRI_ESRIN_JERS'
    args = [pgm, CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_PulSAR(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file generation for ERS SLC data from the PULSAR SAR processor
    | Copyright 2020, Gamma Remote Sensing, v1.3 21-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) ERS CEOS SAR leader file
    SLC_par:
        (output) ISP SLC parameter file (example .slc.par)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_PulSAR'
    args = [pgm, CEOS_SAR_leader, SLC_par]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RCM_GRC(RCM_dir, polarization, radcal, noise, SLC_par='-', SLC='-', GRC_par='-', GRC='-', rps='-', noise_pwr='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate slant and ground range complex images and parameter files from a Radarsat Constellation GRC (Ground Range georeferenced Complex) product
    | Copyright 2024, Gamma Remote Sensing, v3.0 21-Oct-2024 cm

    Parameters
    ----------
    RCM_dir:
        (input) Radarsat Constellation main directory path (e.g.: RCM3_OK1001322_PK1001415_1_5M4_20160417_004803_VV_GRC)
    polarization:
        image polarization: HH, VV, HV, VH, CH, CV
    radcal:
        radiometric calibration flag (enter - for default)
        * 0: none (default)
        * 1: Beta Nought
        * 2: Sigma Nought
        * 3: Gamma
    noise:
        noise levels flag (enter - for default)
        * 0: do not use noise levels file (default)
        * 1: use noise levels file
        * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)
    SLC_par:
        (output) SLC parameter file (example: yyyymmdd_pp.slc.par, enter - for none)
    SLC:
        (output) SLC data file in slant range geometry (example: yyyymmdd_pp.slc, enter - for none)
    GRC_par:
        (output) GRC parameter file (example: yyyymmdd_pp.grc.par, enter - for none)
    GRC:
        (output) GRC data file (example: yyyymmdd_pp.grc, enter - for none)
    rps:
        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)
    noise_pwr:
        (output) noise intensity for each SLC sample in slant range using data from noise levels file (enter - for none)
        * NOTE: when the noise_pwr file is specified, noise power correction will NOT be applied to the GRC / SLC data values
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_GRC'
    args = [pgm, RCM_dir, polarization, radcal, noise,
            SLC_par, SLC, GRC_par, GRC, rps, noise_pwr]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RCM_GRD(RCM_dir, polarization, radcal, noise, MLI_par='-', MLI='-', GRD_par='-', GRD='-', rps='-', noise_pwr='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI and GRD images and parameter files from a Radarsat Constellation GRD (Ground Range georeferenced Detected) product
    | Copyright 2024, Gamma Remote Sensing, v2.9 21-Oct-2024 cm

    Parameters
    ----------
    RCM_dir:
        (input) Radarsat Constellation main directory path (e.g.: RCM1_OK1001327_PK1001418_1_3M28_20160417_013625_HH_GRD)
    polarization:
        image polarization: HH, VV, HV, VH, CH, CV
    radcal:
        radiometric calibration flag (enter - for default)
        * 0: none (default)
        * 1: Beta Nought
        * 2: Sigma Nought
        * 3: Gamma
    noise:
        noise levels flag (enter - for default)
        * 0: do not use noise levels file (default)
        * 1: use noise levels file
        * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)
    MLI_par:
        (output) MLI parameter file (example: yyyymmdd_pp.mli.par, enter - for none)
    MLI:
        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)
    GRD_par:
        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)
    GRD:
        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)
    rps:
        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)
    noise_pwr:
        (output) noise intensity for each MLI sample in slant range using data from noise levels file (enter - for none)
        * NOTE: when the noise_pwr file is specified, noise power correction will NOT be applied to the GRD / MLI data values
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_GRD'
    args = [pgm, RCM_dir, polarization, radcal, noise,
            MLI_par, MLI, GRD_par, GRD, rps, noise_pwr]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RCM_MLC(RCM_dir, radcal, noise, root_name, logpath=None, outdir=None, shellscript=None):
    """
    | Generate parameter and image files for Radarsat Constellation MLC (Multi-Look Complex) data from GeoTIFF or NITF format
    | Copyright 2024, Gamma Remote Sensing, v1.4 21-Oct-2024 cm

    Parameters
    ----------
    RCM_dir:
        (input) Radarsat Constellation main directory path (e.g.: RCM2_OK1782060_PK1782073_2_SC30MCPC_20200504_105537_CH_CV_MLC)
    radcal:
        radiometric calibration flag (enter - for default)
        * 0: none (default)
        * 1: Beta Nought
        * 2: Sigma Nought
        * 3: Gamma
    noise:
        noise levels flag (enter - for default)
        * 0: do not use noise levels file (default)
        * 1: use noise levels file
        * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)
    root_name:
        (output) root name of the generated output files (example: yyyymmdd)
        * NOTE: the program will automatically complete the root_name and add extensions for each covariance matrix element
        for both data and parameter files, such as 20210927_CH.mlc, 20210927_CH.mlc.par, 20210927_XC.mlc, etc.
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_MLC'
    args = [pgm, RCM_dir, radcal, noise, root_name]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RCM_SLC(RCM_dir, polarization, radcal, noise, SLC_par, SLC, noise_pwr='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for Radarsat Constellation SLC data from GeoTIFF or NITF file
    | Copyright 2024, Gamma Remote Sensing, v2.7 21-Oct-2024 cm

    Parameters
    ----------
    RCM_dir:
        (input) Radarsat Constellation main directory path (e.g.: RCM2_OK1002260_PK1002436_3_SC50MB_20160417_002427_VH_VV_SLC)
    polarization:
        image polarization: HH, VV, HV, VH, CH, CV
    radcal:
        radiometric calibration flag (enter - for default)
        * 0: none (default)
        * 1: Beta Nought
        * 2: Sigma Nought
        * 3: Gamma
    noise:
        noise levels flag (enter - for default)
        * 0: do not use noise levels file (default)
        * 1: use noise levels file
        * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)
    SLC_par:
        (output) ISP SLC parameter file (example: yyyymmdd_pp.slc.par)
    SLC:
        (output) SLC data file (example: yyyymmdd_pp.slc)
    noise_pwr:
        (output) noise intensity for each SLC sample in slant range using data from noise levels file (enter - for none)
        * NOTE: when the noise_pwr file is specified, noise power correction will NOT be applied to the SLC data values
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_SLC'
    args = [pgm, RCM_dir, polarization, radcal, noise,
            SLC_par, SLC, noise_pwr]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RCM_SLC_ScanSAR(RCM_dir, polarization, radcal, noise_in, root_name, SLC_tab='-', beam='-', noise_out='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files from Radarsat Constellation ScanSAR SLC data in GeoTIFF or NITF format
    | Copyright 2024, Gamma Remote Sensing, v3.3 21-Oct-2024 cm

    Parameters
    ----------
    RCM_dir:
        (input) Radarsat Constellation main directory path (e.g.: RCM2_OK1002260_PK1002436_3_SC50MB_20160417_002427_VH_VV_SLC)
    polarization:
        image polarization: HH, VV, HV, VH, CH, CV
    radcal:
        radiometric calibration flag (enter - for default)
        * 0: none (default)
        * 1: Beta Nought
        * 2: Sigma Nought
        * 3: Gamma
    noise_in:
        noise levels flag (enter - for default)
        * 0: do not use noise levels file (default)
        * 1: use noise levels file
        * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)
    root_name:
        (output) root name of the generated output files (example: yyyymmdd_pp)
        * NOTE: the program will automatically complete the root_name with beam numbers and extensions for the SLC, SLC_par, and TOPS_par files
    SLC_tab:
        (output) 3 column list of SLC, SLC_par, and TOPS_par files, with the beams sorted from near to far range (example: yyyymmdd_pp.SLC_tab)
    beam:
        number specifying the desired ScanSAR beam number (enter - for default: extract all beams)
        * NOTE: enter 0 to get the list of the available beams
    noise_out:
        output noise intensity for each SLC sample in slant range flag (enter - for default)
        * 0: do not write noise intensity files (default)
        * 1: write noise intensity files (file name(s) automatically defined)
        * NOTE: when noise intensity files are written, noise power correction will NOT be applied to the SLC data values
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_SLC_ScanSAR'
    args = [pgm, RCM_dir, polarization, radcal, noise_in,
            root_name, SLC_tab, beam, noise_out]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RISAT_GRD(CEOS_leader, BAND_META, GRD_par, CEOS_image, GRD='-', line_dir='-', pix_dir='-', cal_flg='-', KdB='-', logpath=None, outdir=None, shellscript=None):
    """
    | Read RISAT-1 Ground-Range data from a CEOS data set and perform radiometric calibration
    | Copyright 2015, Gamma Remote Sensing, v1.2 24-Feb-2015 clw

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS SAR leader file (example: lea_01.001)
    BAND_META:
        (input) BAND_META.txt, additional RISAT system parameters for the scene (format keywork=value)
    GRD_par:
        (output) ISP GRD parameter file (example: YYYYMMDD.grd.par)
    CEOS_image:
        (input) CEOS Ground-Range image file (example: dat_01.001)
    GRD:
        (output) Ground-Range data with file and line headers removed (enter - for none: example: YYYYMMDD.grd)
    line_dir:
        set output image line direction (enter - for default):
        * 0: used value derived from CEOS leader file
        * 1: retain input data line direction (default)
        * -1: reverse input data line direction
    pix_dir:
        set output pixel direction (enter - for default):
        * 0: used value derived from CEOS leader file
        * 1: retain input data pixel direction (default)
        * -1: reverse input data pixel direction
    cal_flg:
        calibration flag (enter - for default):
        * 0: do not apply radiometric calibration
        * 1: apply radiometric calibration including KdB and incidence angle correction (default)
    KdB:
        calibration constant (dB) (enter - to use value in the CEOS leader)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RISAT_GRD'
    args = [pgm, CEOS_leader, BAND_META, GRD_par, CEOS_image,
            GRD, line_dir, pix_dir, cal_flg, KdB]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RISAT_SLC(CEOS_leader, BAND_META, SLC_par, CEOS_image, SLC='-', line_dir='-', pix_dir='-', cal_flg='-', KdB='-', logpath=None, outdir=None, shellscript=None):
    """
    | Read RISAT-1 CEOS format SLC data and perform radiometric calibration
    | Copyright 2013, Gamma Remote Sensing, v1.1 3-Jun-2013 clw

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS SAR leader file (example: lea_01.001)
    BAND_META:
        (input) BAND_META.txt, additional RISAT system parameters for the scene (format keywork=value)
    SLC_par:
        (output) ISP SLC image parameter file (example: YYYYMMDD.grd.par)
    CEOS_image:
        (input) CEOS SLC image file (example: dat_01.001)
    SLC:
        (output) SLC data with file and line headers removed (enter - for none: example: YYYYMMDD.grd)
    line_dir:
        set output image line direction (enter - for default):
        * 0: used value derived from CEOS leader file
        * 1: retain input data line direction (default)
        * -1: reverse input data line direction
    pix_dir:
        set output pixel direction (enter - for default):
        * 0: used value derived from CEOS leader file
        * 1: retain input data pixel direction (default)
        * -1: reverse input data pixel direction
    cal_flg:
        calibration flag (enter - for default):
        * 0: do not apply radiometric calibration
        * 1: apply radiometric calibration including KdB and incidence angle correction (default)
    KdB:
        calibration constant (dB) (enter - to use value in the CEOS leader)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RISAT_SLC'
    args = [pgm, CEOS_leader, BAND_META, SLC_par, CEOS_image,
            SLC, line_dir, pix_dir, cal_flg, KdB]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RSAT2_SG(product_XML, lut_XML, GeoTIFF, polarization, MLI_par='-', MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI and GRD images and parameter files from Radarsat 2 SGF/SGX/SCF data
    | Copyright 2023, Gamma Remote Sensing, v2.2 7-Jun-2023 awi/cw/cm

    Parameters
    ----------
    product_XML:
        (input) Radarsat-2 product annotation XML file (product.xml)
    lut_XML:
        (input) Radarsat-2 calibration XML file (lutSigma.xml), use - for no calibration
    GeoTIFF:
        (input) image data file in GeoTIFF format (imagery_PP.tif)
    polarization:
        (input) image polarization: HH, VV, HV, VH
    MLI_par:
        (output) MLI parameter file (example: yyyymmdd_pp.mli.par, enter - for none)
    MLI:
        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)
    GRD_par:
        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)
    GRD:
        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)
    rps:
        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)
        * NOTE: Ground range geometry is less accurate than slant range geometry and should be avoided
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT2_SG'
    args = [pgm, product_XML, lut_XML, GeoTIFF, polarization,
            MLI_par, MLI, GRD_par, GRD, rps]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RSAT2_SLC(product_XML, lut_XML, GeoTIFF, polarization, SLC_par, SLC, logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for Radarsat 2 SLC data from GeoTIFF
    | Copyright 2023, Gamma Remote Sensing, v2.9 7-Jun-2023 awi/clw/cm

    Parameters
    ----------
    product_XML:
        (input) Radarsat-2 product annotation XML file (product.xml)
    lut_XML:
        (input) Radarsat-2 calibration XML file (lutSigma.xml), use - for no calibration
    GeoTIFF:
        (input) image data file in GeoTIFF format (imagery_PP.tif)
    polarization:
        (input) image polarization: HH, VV, HV, VH
    SLC_par:
        (output) ISP SLC parameter file (example: yyyymmdd_pp.slc.par)
    SLC:
        (output) SLC data file (example: yyyymmdd_pp.slc)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT2_SLC'
    args = [pgm, product_XML, lut_XML, GeoTIFF, polarization, SLC_par, SLC]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RSAT_SCW(CEOS_leader, CEOS_trailer, CEOS_data, GRD_par, GRD, sc_dB='-', dt='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for SCANSAR Wide Swath Data
    | Copyright 2020, Gamma Remote Sensing, v2.2 3-Sep-2020 clw/cm

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS SAR leader file
    CEOS_trailer:
        (input) CEOS SAR trailer file
    CEOS_data:
        (input) CEOS data file binary)
    GRD_par:
        (output) ISP ground range image parameter file (example .mli.par)
    GRD:
        (output) ISP ground range image (example .mli) (enter - for none, float)
    sc_dB:
        intensity scale factor in dB (enter - for default: 0.00)
    dt:
        azimuth image time offset (s) (enter - for default = 0.0)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT_SCW'
    args = [pgm, CEOS_leader, CEOS_trailer, CEOS_data,
            GRD_par, GRD, sc_dB, dt]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RSAT_SGF(CEOS_leader, CEOS_data, GRD_par, GRD, sc_dB='-', dt='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for RSI/Atlantis Radarsat SGF (ground range) and SCANSAR SCW16 data
    | Copyright 2020, Gamma Remote Sensing, v2.4 3-Sep-2020 clw/cm

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS leader file (RSI SGF or SCW16 products, LEA_01.001)
    CEOS_data:
        (input) CEOS data file (RSI SGF or SCW16 products, DAT_01.001)
    GRD_par:
        (output) ISP ground range image parameter file (example .mli.par)
    GRD:
        (output) ISP ground range image (example .grd.par) (enter - for none, float)
    sc_dB:
        intensity scale factor in dB (enter - for default: 0.00)
    dt:
        azimuth image time offset (s) (enter - for default = 0.0)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT_SGF'
    args = [pgm, CEOS_leader, CEOS_data, GRD_par, GRD, sc_dB, dt]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RSAT_SLC(CEOS_leader, SLC_par, CEOS_data, SLC='-', sc_dB='-', dt='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for RSI/Atlantis/ASF processed Radarsat SLC data
    | Copyright 2012, Gamma Remote Sensing, v4.0 5-Sep-2012 clw

    Parameters
    ----------
    CEOS_leader:
        (input) CEOS SAR leader file (example: lea_01.001)
    SLC_par:
        (output) ISP SLC parameter file (example: .slc.par)
    CEOS_data:
        (input) CEOS data file (example: dat_01.001)
    SLC:
        (output) SLC data with file and line headers removed (example: .slc)
    sc_dB:
        intensity scale factor in dB (enter - for default: 60.00)
    dt:
        azimuth image time offset (s) (enter - for default = 0.0)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT_SLC'
    args = [pgm, CEOS_leader, SLC_par, CEOS_data, SLC, sc_dB, dt]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_RSI_ERS(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):
    """
    | ISP parameter file for RSI processed ERS SLC data
    | Copyright 2020, Gamma Remote Sensing, v1.8 3-Sep-2020 clw/uw/cm

    Parameters
    ----------
    CEOS_SAR_leader:
        (input) ERS CEOS SAR leader file
    SLC_par:
        (output) ISP SLC parameter file (example .slc.par)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_RSI_ERS'
    args = [pgm, CEOS_SAR_leader, SLC_par]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_S1_GRD(GeoTIFF, annotation_XML, calibration_XML, noise_XML, MLI_par, MLI, GRD_par='-', GRD='-', eflg='-', rps='-', noise_pwr='-', edge_flag='-', loff='-', nl='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI and GRD images and parameter files from a Sentinel-1 GRD product
    | Copyright 2023, Gamma Remote Sensing, v4.8 27-Apr-2023 awi/clw/ts/cm

    Parameters
    ----------
    GeoTIFF:
        (input) image data file in GeoTIFF format (enter - for none, \\*.tiff)
    annotation_XML:
        (input) Sentinel-1 L1 XML annotation file
    calibration_XML:
        (input) Sentinel-1 L1 radiometric calibration XML file (enter - for no radiometric calibration)
    noise_XML:
        (input) Sentinel-1 L1 noise XML file (enter - to not subtract thermal noise power level)
    MLI_par:
        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)
    MLI:
        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)
    GRD_par:
        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)
    GRD:
        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)
    eflg:
        GR-SR grid extrapolation flag (enter - for default)
        * 0: no extrapolation of the GR-SR grid beyond the grid boundaries
        * 1: permit extrapolation of the GR-SR grid to cover the entire image (default)
        * NOTE: extrapolation of the GR-SR grid may introduce geocoding errors
    rps:
        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)
    noise_pwr:
        noise intensity for each MLI sample in slant range using data from noise_XML (enter - for none)
        * NOTE: when the noise_pwr file is specified, noise power correction will NOT be applied to the MLI data values
    edge_flag:
        edge cleaning flag (enter - for default)
        * 0: do not clean edges (default for Sentinel-1 IPF version >= 2.90)
        * 1: basic method
        * 2: elaborate method based on Canny edge detection (default for Sentinel-1 IPF version < 2.90)
        * 3: force basic method when Sentinel-1 IPF version >= 2.90
        * 4: force elaborate method based on Canny edge detection when Sentinel-1 IPF version >= 2.90
        * NOTE: options 1 and 2 are changed to 0 when Sentinel-1 IPF version >= 2.90
    loff:
        offset to starting line of the input segment (enter - for default: 0)
    nl:
        number of lines to read from the file beginning at loff (enter - for default: to end of file)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_S1_GRD'
    args = [pgm, GeoTIFF, annotation_XML, calibration_XML, noise_XML,
            MLI_par, MLI, GRD_par, GRD, eflg, rps, noise_pwr,
            edge_flag, loff, nl]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_S1_SLC(GeoTIFF, annotation_XML, calibration_XML, noise_XML, SLC_par, SLC, TOPS_par='-', dtype='-', sc_dB='-', noise_pwr='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for Sentinel-1 SLC data
    | Copyright 2025, Gamma Remote Sensing, v5.7 14-Apr-2025 awi/clw/cm

    Parameters
    ----------
    GeoTIFF:
        (input) image data file in \\*.tiff GeoTIFF format (enter - for default: none)
    annotation_XML:
        (input) Sentinel-1 L1 XML annotation file
    calibration_XML:
        (input) Sentinel-1 L1 radiometric calibration XML file to generate output as sigma0
        (enter - for default: return uncalibrated digital numbers)
    noise_XML:
        (input) Sentinel-1 L1 noise XML file (enter - for default: no subtraction of thermal noise power)
    SLC_par:
        (output) ISP SLC parameter file. Example: yyyymmdd_iw1_vv.slc.par
    SLC:
        (output) SLC data file (enter - for default: none). Example: yyyymmdd_iw1_vv.slc
    TOPS_par:
        (output) SLC burst annotation file; for TOPS and EW SLC data only (enter - for default: none). Example: yyyymmdd_iw1_vv.slc.tops_par
    dtype:
        output data type (enter - for default)
        * 0: FCOMPLEX (default)
        * 1: SCOMPLEX
    sc_dB:
        scale factor for FCOMPLEX -> SCOMPLEX, (enter - for default: HH,VV (dB): 60.0000, VH,HV: 70.0000)
    noise_pwr:
        noise intensity for each SLC sample in slant range using data from noise_XML (enter - for none)
        * NOTE: when the noise_pwr file is specified, noise power will NOT be subtracted from the image data values
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_S1_SLC'
    args = [pgm, GeoTIFF, annotation_XML, calibration_XML, noise_XML,
            SLC_par, SLC, TOPS_par, dtype, sc_dB, noise_pwr]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_SAOCOM_GRD(data, XML, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI parameter and image files for SAOCOM L1B Ground Range Detected Images
    | Copyright 2025, Gamma Remote Sensing, v1.0 13-Jan-2025 cm

    Parameters
    ----------
    data:
        (input) SAOCOM image data file in binary format (enter - for none, e.g. di--acqId0000729082-a-tw--2411281122-hh-m)
    XML:
        (input) SAOCOM XML annotation file (e.g. di--acqId0000729082-a-tw--2411281122-hh-m.xml)
    MLI_par:
        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)
    MLI:
        (output) MLI data file (FCOMPLEX, enter - for none, example: yyyymmdd_pp.mli)
    GRD_par:
        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)
    GRD:
        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)
    rps:
        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_SAOCOM_GRD'
    args = [pgm, data, XML, MLI_par, MLI, GRD_par, GRD, rps]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_SAOCOM_SLC(data, XML, SLC_par, SLC='-', TOPS_par='-', RSLC_par='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for SAOCOM stripmap and TOPS SLC data
    | Copyright 2023, Gamma Remote Sensing, v1.6 21-Mar-2023 cm

    Parameters
    ----------
    data:
        (input) SAOCOM image data file in binary format (enter - for none, e.g. slc-acqId0000089010-a-tna-0000000000-s3qp-hh)
    XML:
        (input) SAOCOM XML annotation file (e.g. slc-acqId0000089010-a-tna-0000000000-s3qp-hh.xml)
    SLC_par:
        (output) SLC parameter file (example: yyyymmdd_s3_pp.slc.par)
    SLC:
        (output) SLC data file (FCOMPLEX, enter - for none, example: yyyymmdd_s3_pp.slc)
    TOPS_par:
        (output) SLC burst annotation file, TOPS data only (enter - for none, example: yyyymmdd_s3_vv.slc.tops_par)
    RSLC_par:
        (input) reference SLC parameter file to keep consistent range pixel spacing (example: yyyymmdd_s1_pp.slc.par)
        * NOTE: SAOCOM geocoded data in GeoTIFF format (GEC and GTC / level 1C and 1D data) can be read using par_SAOCOM_geo program
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_SAOCOM_SLC'
    args = [pgm, data, XML, SLC_par, SLC, TOPS_par, RSLC_par]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_SICD_SLC(NITF, radcal, noise, SLC_par, SLC='-', XML='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for SICD SLC data
    | Copyright 2025, Gamma Remote Sensing, v2.0 28-Apr-2025 cm

    Parameters
    ----------
    NITF:
        (input) Sensor Independent Complex Data (SICD) file in NITF 2.1 container file (e.g.: CAPELLA_C03_SM_SICD_HH_20210512034455_20210512034459.ntf)
    radcal:
        radiometric calibration flag (enter - for default)
        * 0: none
        * 1: beta0 (default)
        * 2: sigma0
        * 3: gamma0
        * 4: RCS (target radar cross section in m^2)
    noise:
        noise levels flag (enter - for default)
        * 0: do not use noise levels (default)
        * 1: use noise levels
    SLC_par:
        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)
    SLC:
        (output) SLC data file (enter - for none, example: yyyymmdd.slc)
    XML:
        (output) XML metadata file (enter - for none, example: CAPELLA_C03_SM_SICD_HH_20210512034455_20210512034459.xml)
    logpath: str or None
        directory in which command logfiles are written
    outdir: str or None
        directory in which to execute the command
    shellscript: str or None
        path of a file to which the Gamma commands are appended in shell format
    """
    pgm = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_SICD_SLC'
    args = [pgm, NITF, radcal, noise, SLC_par, SLC, XML]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_SIRC(CEOS_leader, SLC_par, UTC_MET='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP SLC parameter file from SIR-C CEOS leader file
    | Copyright 2025, Gamma Remote Sensing, v2.7 28-May-2025 clw/uw
    Parameters
    ----------
    CEOS_leader:
        (input) JPL SIR-C CEOS leader file
    SLC_par:
        (output) ISP SLC parameter file
    UTC_MET:
        time reference for state vectors: MET (Mission Elapsed Time) or UTC (enter - for default: UTC)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_SIRC'
    process([executable, CEOS_leader, SLC_par, UTC_MET],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_STRIX(CEOS_leader, SLC_par, CEOS_data, SLC='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for Synspective StriX SLC data
    | Copyright 2023, Gamma Remote Sensing, v1.5 9-May-2023 awi/cm
    Parameters
    ----------
    CEOS_leader:
        (input) CEOS leader file for STRIX-alpha SLC data (LED-STRIXA...)
    SLC_par:
        (output) ISP image parameter file (example: yyyymmdd.slc.par)
    CEOS_data:
        (input) STRIX-alpha CEOS format SLC (IMG-pp-STRIXA...)
    SLC:
        (output) reformatted STRIX SLC (example: yyyymmdd.slc, enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [CEOS_leader, SLC_par, CEOS_data, SLC]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/par_STRIX'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_SV2_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', dtype='-', radcal='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter file and SLC image from a SuperView Neo-2 / SuperView-2 / Gaojing-2 data set
    | Copyright 2025, Gamma Remote Sensing, v1.3 12-May-2025 awi/cm
    Parameters
    ----------
    GeoTIFF:
        (input) image data file in GeoTIFF format (enter - for none, \\*.tiff)
    annotation_XML:
        (input) SV-2 product annotation XML file (\\*.meta.xml)
    SLC_par:
        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)
    SLC:
        (output) SLC data file, example: yyyymmdd.slc (enter - for none, SLC output will not be produced)
    dtype:
        output data type (enter - for default)
        * 0: FCOMPLEX (default)
        * 1: SCOMPLEX
    radcal:
        output radiometric calibration flag (enter - for default)
        * 0: beta0
        * 1: sigma0 (default)
        * 2: gamma0
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_SV2_SLC'
    process([executable, GeoTIFF, annotation_XML, SLC_par, SLC, dtype, radcal],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_SWOT_SLC(NETCDF, trunk, DEM='-', DEM_par='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter and image files for SWOT level 1B KaRIn SLC data
    | Copyright 2024, Gamma Remote Sensing, v1.2 30-Oct-2024 cm
    Parameters
    ----------
    NETCDF:
        (input) SWOT level 1B KaRIn SLC data file in NETCDF format (``SWOT_L1B_..._PIC0_01.nc``)
    trunk:
        (output) file name trunk used for output filenames
        (example: yyyymmdd -> yyyymmdd_L_minus_y.slc yyyymmdd_L_minus_y.slc.par)
    DEM:
        (output) DEM file in SCH coordinates (enter - for none)
    DEM_par:
        (output) DEM parameter file in SCH coordinates (enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [NETCDF, trunk, DEM, DEM_par]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/par_SWOT_SLC'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_TX_GRD(annotation_XML, GeoTIFF, GRD_par, GRD='-', pol='-', MLI_par='-', MLI='-', rps='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate ground range image and image parameter file for Terrasar-X MGD data in GeoTIFF format
    | Copyright 2023, Gamma Remote Sensing, v1.5 8-May-2023 awi/clw/cm
    Parameters
    ----------
    annotation_XML:
        (input) Terrasar-X product annotation XML file
    GeoTIFF:
        (input) image data file in GeoTIFF format
        * NOTE: make sure the data set contains the selected polarization
    GRD_par:
        (output) ISP ground range image parameter file (example: yyyymmdd.grd.par, enter - for none)
    GRD:
        (output) calibrated ground range data file (example: yyyymmdd.grd, enter - for none)
    pol:
        polarization: HH, HV, VH, VV (enter - for default: first polarization found in the annotation_XML)
    MLI_par:
        (output) MLI parameter file (example: yyyymmdd.mli.par, enter - for none)
    MLI:
        (output) MLI data file in slant range geometry (example: yyyymmdd.mli, enter - for none)
    rps:
        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_TX_GRD'
    process([executable, annotation_XML, GeoTIFF, GRD_par, GRD, pol, MLI_par, MLI, rps],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_TX_ScanSAR(annotation_XML, swath, SLC_par, SLC, TOPS_par, bwflg='-', dtype='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC, SLC_par and TOPS_par from a Terrasar-X ScanSAR data set
    | Copyright 2023, Gamma Remote Sensing, v2.4 18-Apr-2023 clw/cm/awi
    Parameters
    ----------
    annotation_XML:
        (input) TerraSAR-X ScanSAR product annotation XML file including path
        * NOTE: The path to the image products is determined from the path to the XML annotation
    swath:
        number specifying the desired ScanSAR swath (1 -> maximum number of swaths (4 or 6))
        * NOTE: The image product name is specified in the XML file
    SLC_par:
        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)
    SLC:
        (output) SLC ScanSAR data file, example: yyyymmdd.slc
        (enter - for none, SLC output will not be produced)
    TOPS_par:
        (output) SLC ScanSAR burst annotation file (example: yyyymmdd_s1.slc.tops_par)
    bwflg:
        burst window flag (enter - for default)
        * 0: use first and last annotation line values specified in the annotation_XML
        * 1: extend first and last valid line to include all data lines (default)
    dtype:
        output data type (enter - for default)
        * 0: same as input (default)
        * 1: FCOMPLEX
        * NOTE: While TSX ScanSAR data are not acquired in TOPS mode, the same data structure can be used for burst annotation
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the command line and delegate execution to the shared runner
    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_TX_ScanSAR', annotation_XML, swath, SLC_par, SLC, TOPS_par, bwflg, dtype]
    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_TX_SLC(annotation_XML, COSAR, SLC_par, SLC, pol='-', dtype='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate SLC parameter file and SLC image from a Terrasar-X SSC data set
    | Copyright 2023, Gamma Remote Sensing, v2.5 6-Mar-2023 awi/clw/cm
    Parameters
    ----------
    annotation_XML:
        (input) TerraSAR-X product annotation XML file
    COSAR:
        (input) COSAR SSC stripmap or spotlight mode SLC data file
    SLC_par:
        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)
    SLC:
        (output) SLC data file, example: yyyymmdd.slc (enter - for none, SLC output will not be produced)
    pol:
        polarization HH, HV, VH, VV (enter - for default: first polarization found in the annotation_XML)
    dtype:
        output data type (enter - for default)
        * 0: same as input (default)
        * 1: FCOMPLEX
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [annotation_XML, COSAR, SLC_par, SLC, pol, dtype]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/par_TX_SLC'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def par_UAVSAR_SLC(ann, SLC_MLC_in, SLC_MLI_par, SLC_MLI_out='-', image_type='-', image_format='-', DOP='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP image parameter file from UAVSAR annotation file (ann) for SLC and MLC products
    | Copyright 2025, Gamma Remote Sensing, v2.0 31-Mar-2025 clw/cm
    Parameters
    ----------
    ann:
        (input) UAVSAR annotation file (\\*ann.txt or \\*.ann)
    SLC_MLC_in:
        (input) UAVSAR binary data file (required for annotation file version 1.2) (enter - for none)
    SLC_MLI_par:
        (output) ISP image parameter file
    SLC_MLI_out:
        (output) SLC data file (enter - for none)
    image_type:
        image type flag (enter - for default)
        * 0: SLC (slc) in slant range coordinates (default)
        * 1: MLC (mlc) in slant range coordinates
        HHHH\\*, VVVV\\*, HVHV\\* are FLOAT format
        HHHV\\*, HHVV\\*, HVVV\\* are FCOMPLEX format
    image_format:
        image data format flag (enter - for default)
        * 0: FCOMPLEX (pairs of 4-byte float (re,im)) (default)
        * 2: FLOAT (4-bytes/value)
    DOP:
        (input) UAVSAR Doppler look-up table (if not zero-Doppler) (enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/par_UAVSAR_SLC'
    process([executable, ann, SLC_MLC_in, SLC_MLI_par, SLC_MLI_out, image_type, image_format, DOP],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ph_slope_base(int_in, SLC_par, OFF_par, base, int_out, int_type='-', inverse='-', logpath=None, outdir=None, shellscript=None):
    """
    | Subtract/add interferogram flat-Earth phase trend as estimated from initial baseline
    | Copyright 2023, Gamma Remote Sensing, v4.5 19-Apr-2023 clw
    Parameters
    ----------
    int_in:
        (input) interferogram (FCOMPLEX) or unwrapped phase (FLOAT) (unflattened)
    SLC_par:
        (input) ISP parameter file for the reference SLC
    OFF_par:
        (input) ISP offset/interferogram parameter file
    base:
        (input) baseline file
    int_out:
        (output) interferogram (FCOMPLEX) or unwrapped phase (FLOAT) with phase trend subtracted/added
    int_type:
        interferogram type (enter - for default)
        * 0: unwrapped phase
        * 1: complex interferogram (default)
    inverse:
        subtract/add inversion flag (enter - for default)
        * 0: subtract phase ramp (default)
        * 1: add phase ramp
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [int_in, SLC_par, OFF_par, base, int_out, int_type, inverse]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/ph_slope_base'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def phase_slope(interf, slopes, width, win_sz='-', thres='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate interferogram phase slopes in range and azimuth
    | Copyright 2023, Gamma Remote Sensing, v1.4 18-Apr-2023 clw/uw
    Parameters
    ----------
    interf:
        (input) interferogram (fcomplex)
    slopes:
        (output) range and azimuth phase slopes (fcomplex)
    width:
        number of samples/row
    win_sz:
        size of region used for slopes determination (enter - for default: 5)
    thres:
        correlation threshold for accepting slope estimates 0.0 -> 1.0 (enter - for default: .4)
    xmin:
        starting range pixel offset (enter - for default: 0)
    xmax:
        last range pixel offset (enter - for default: width-1)
    ymin:
        starting azimuth row offset (enter - for default: 0)
    ymax:
        last azimuth row offset (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/phase_slope'
    process([executable, interf, slopes, width, win_sz, thres, xmin, xmax, ymin, ymax],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def PRC_vec(SLC_par, PRC, nstate='-', logpath=None, outdir=None, shellscript=None):
    """
    | State vectors from ERS PRC orbit data for ISP processing clw/uw
    | Copyright 2023, Gamma Remote Sensing, v1.9 11-Oct-2023 clw
    Parameters
    ----------
    SLC_par:
        (input/output) ISP SLC/MLI image parameter file
    PRC:
        (input) PRC state vector file
    nstate:
        number of state vectors (enter - for default: 5, maximum: 1024)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [SLC_par, PRC, nstate]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/PRC_vec'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ptarg_cal_MLI(MLI_par, MLI, r_samp, az_samp, psigma, c_r_samp, c_az_samp, ptr_image, r_plot, az_plot, pcal, osf='-', win='-', pltflg='-', psz='-', csz='-', theta_inc='-', logpath=None, outdir=None, shellscript=None):
    """
    | Point target analysis and radiometric calibration of slant-range and ground-range (GRD) images
    | Copyright 2016, Gamma Remote Sensing, v2.6 19-Feb-2016 clw
    Parameters
    ----------
    MLI_par:
        (input) slant-range or ground-range image parameter file for detected intensity data
    MLI:
        (input) ground-range or slant range detected image in FLOAT format
    r_samp:
        point target range sample number, target region size is 16x16
    az_samp:
        point target azimuth line number, target region size is 16x16
    psigma:
        radar cross-section of the calibration target in m\\*\\*2
    c_r_samp:
        clutter region center range sample number, clutter region size is 16x16
    c_az_samp:
        clutter region center azimuth line number, clutter region size is 16x16
    ptr_image:
        (output) oversampled point target image, with and without phase gradient, nominal width: 256
    r_plot:
        (output) range point target response plot data (text format)
    az_plot:
        (output) azimuth point target response plot data (text format)
    pcal:
        (output) measured point target parameters and radiometric calibration factor (text format)
    osf:
        image over-sampling factor, 2, 4, 8, 16, 32, 64 (enter - for default: 16)
    win:
        maximum search window offset (samples) (enter - for default: 1)
    pltflg:
        plotting mode flag:
        * 0: none
        * 1: output plots in PNG format (default)
        * 2: screen output
        * 3: output plots in PDF format
    psz:
        point target region size (samples) (enter - for default: 16)
    csz:
        clutter region size (samples) (enter - for default: 16)
    theta_inc:
        incidence angle required for calibration of terrain corrected RISAT-1 images
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the command line and delegate execution to the shared runner
    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ptarg_cal_MLI', MLI_par, MLI, r_samp, az_samp, psigma, c_r_samp, c_az_samp, ptr_image, r_plot, az_plot, pcal, osf, win, pltflg, psz, csz, theta_inc]
    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ptarg_cal_SLC(SLC_par, SLC, r_samp, az_samp, psigma, c_r_samp, c_az_samp, ptr_image, r_plot, az_plot, pcal, osf='-', win='-', pltflg='-', psz='-', csz='-', c_image='-', logpath=None, outdir=None, shellscript=None):
    """
    | Point target analysis and radiometric calibration of SLC images
    | Copyright 2016, Gamma Remote Sensing, v2.4 19-Feb-2016 clw
    Parameters
    ----------
    SLC_par:
        (input) SLC image parameter file
    SLC:
        (input) SLC image in FCOMPLEX or SCOMPLEX format
    r_samp:
        point target range sample number, target region size is 16x16
    az_samp:
        point target azimuth line number, target region size is 16x16
    psigma:
        radar cross-section of the calibration target in m\\*\\*2
    c_r_samp:
        clutter region center range sample number, clutter region size is 16x16
    c_az_samp:
        clutter region center azimuth line number, clutter region size is 16x16
    ptr_image:
        (output) oversampled point target image, with and without phase gradient, nominal width: 256
    r_plot:
        (output) range point target response plot data (text format)
    az_plot:
        (output) azimuth point target response plot data (text format)
    pcal:
        (output) measured point target parameters and radiometric calibration factor (text format)
    osf:
        image over-sampling factor, 2, 4, 8, 16, 32, 64 (enter - for default: 16)
    win:
        maximum search window offset (samples) (enter - for default: 1)
    pltflg:
        plotting mode flag:
        * 0: none
        * 1: output plots in PNG format (default)
        * 2: screen output
        * 3: output plots in PDF format
    psz:
        point target region size (samples) (enter - for default: 16)
    csz:
        clutter region size (samples) (enter - for default: 16)
    c_image:
        (output) clutter region image (FCOMPLEX format)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [SLC_par, SLC, r_samp, az_samp, psigma, c_r_samp, c_az_samp,
            ptr_image, r_plot, az_plot, pcal, osf, win, pltflg, psz, csz, c_image]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/ptarg_cal_SLC'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ptarg_SLC(SLC_par, SLC, r_samp, az_samp, ptr_image, r_plot, az_plot, ptr_par='-', osf='-', win='-', pltflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | Point target response analysis and interpolation for SLC images
    | Copyright 2024, Gamma Remote Sensing, v2.0 4-Oct-2024 clw
    Parameters
    ----------
    SLC_par:
        (input) SLC image parameter file
    SLC:
        (input) SLC image in FCOMPLEX or SCOMPLEX format
    r_samp:
        point target range sample number
    az_samp:
        point target azimuth line number
    ptr_image:
        (output) oversampled point target image (fcomplex, 1024x1024 samples), with and without phase gradient
    r_plot:
        (output) range point target response plot data (text format)
    az_plot:
        (output) azimuth point target response plot data (text format)
    ptr_par:
        (output) measured point target parameters (text format)
    osf:
        image over-sampling factor, 2, 4, 8, 16, 32, 64 (enter - for default: 16)
    win:
        maximum search window offset (samples) (enter - for default: 1)
    pltflg:
        plotting mode flag:
        * 0: none
        * 1: output plots in PNG format (default)
        * 2: screen output
        * 3: output plots in PDF format
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/ptarg_SLC'
    process([executable, SLC_par, SLC, r_samp, az_samp, ptr_image, r_plot, az_plot, ptr_par, osf, win, pltflg],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def radcal_MLI(MLI, MLI_par, OFF_par, CMLI, antenna='-', rloss_flag='-', ant_flag='-', refarea_flag='-', sc_dB='-', K_dB='-', pix_area='-', logpath=None, outdir=None, shellscript=None):
    """
    | Radiometric calibration for multi-look intensity (MLI) data
    | Copyright 2023, Gamma Remote Sensing, v2.4 6-Jul-2023 uw/clw/of
    Parameters
    ----------
    MLI:
        (input) MLI image (FLOAT)
    MLI_par:
        (input) SLC parameter file of input MLI image
    OFF_par:
        (input) ISP offset/interferogram parameter file (enter - for images in MLI geometry)
    CMLI:
        (output) radiometrically calibrated output MLI (FLOAT)
    antenna:
        (input) 1-way antenna gain pattern file (enter - for none)
    rloss_flag:
        range spreading loss correction (enter - for default)
        * 0: no correction (default)
        * 1: apply r^3 correction (all modes except ASAR APS)
        * 2: apply r^4 correction (used only for ASAR APS mode)
        * -1: undo r^3 correction
        * -2: undo r^4 correction
    ant_flag:
        antenna pattern correction (enter - for default)
        * 0: no correction (default)
        * 1: apply antenna pattern correction
        * -1: undo antenna pattern correction
    refarea_flag:
        reference pixel area correction (enter - for default)
        * 0: no pixel area correction (default)
        * 1: calculate sigma0, scale area by sin(inc_ang)/sin(ref_inc_ang)
        * 2: calculate gamma0, scale area by sin(inc_ang)/(cos(inc_ang)\\*sin(ref_inc_ang)
        * -1: undo sigma0 area scaling factor
        * -2: undo gamma0 area scaling factor
    sc_dB:
        scale factor in dB (enter - for default: 0.0)
    K_dB:
        calibration factor in dB (enter - for default: value from MLI_par)
    pix_area:
        (output) ellipsoid-based ground range sigma0 or gamma0 pixel reference area (FLOAT) (enter - for none)
        refarea_flag 1 or -1: sigma0 ref. area
        refarea_flag 2 or -2: gamma0 ref. area
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [MLI, MLI_par, OFF_par, CMLI, antenna, rloss_flag, ant_flag,
            refarea_flag, sc_dB, K_dB, pix_area]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/radcal_MLI'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def radcal_PRI(PRI, PRI_par, GRD, GRD_par, K_dB='-', inc_ref='-', roff='-', nr='-', loff='-', nl='-', logpath=None, outdir=None, shellscript=None):
    """
    | Convert ESA processed short integer format PRI to radiometrically calibrated GRD image (float)
    | Copyright 2023, Gamma Remote Sensing, v1.7 19-Apr-2023 uw/clw
    Parameters
    ----------
    PRI:
        (input) PRI ground-range image (short integer, sqrt(backscat. intensity)
    PRI_par:
        (input) SLC parameter file of input PRI ground-range image (yyyymmdd.pri.par)
    GRD:
        (output) calibrated ground-range image (float, backscat. intensity)
    GRD_par:
        (output) ISP image parameter file of output calibrated ground-range image (yyyymmdd.grd.par)
    K_dB:
        calibration factor in decibels (enter - for default: 59.75 dB)
        ERS1 (D-Paf,ESRIN): 58.24 dB, ERS2 (D-Paf,ESRIN,I-Paf,UK-Paf after 1997): 59.75 dB
        ENVISAT ASAR: 55.0 dB (all modes)
        for details see product specifications and ESA publications.
    inc_ref:
        reference incidence angle in deg. (enter - for default: 23.0 deg.)
        ENVISAT ASAR: 90.0 deg. (all modes)
    roff:
        offset to starting range sample (enter - for default: 0)
    nr:
        number of range samples (enter - for default: to end of line)
    loff:
        offset to starting line (enter - for default: 0, 1 header line in the input file is assumed for ERS)
    nl:
        number of lines to copy (enter - for default: to end of file)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/radcal_PRI'
    process([executable, PRI, PRI_par, GRD, GRD_par, K_dB, inc_ref, roff, nr, loff, nl],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def radcal_pwr_stat(SLC_tab, SLC_tab_cal, plist, MSR_cal, PWR_cal, roff='-', loff='-', nr='-', nl='-', plist_out='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate calibrated SLC image files using point targets determined from the Mean/Sigma Ratio and Intensity
    | Copyright 2022, Gamma Remote Sensing, v1.5 8-Nov-2022 clw/uw/cm
    Parameters
    ----------
    SLC_tab:
        (input) two column list of the SLC filenames and SLC parameter filenames of the uncalibrated SLC images
    SLC_tab_cal:
        (input) two column list of the SLC filenames and SLC parameter filenames of the calibrated SLC images (enter - for none)
    plist:
        (input) point list for the point to use for calibration (int, enter - to use the data to determine the calibration points)
    MSR_cal:
        mean/sigma ratio for point target selection for relative calibration between scenes: 1.500
    PWR_cal:
        intensity threshold ratio for point target selection for relative calibration between scenes: 1.000
    roff:
        offset to starting range of section to analyze (default -: 0)
    loff:
        offset to starting line of section to analyze (default -: 0)
    nr:
        number of range pixels to analyze (default -: to end of line)
    nl:
        number of azimuth lines to analyze (default -: to end of file)
    plist_out:
        point list of points used to determine calibration using MSR_cal and PWR_cal thresholds
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the command line and delegate execution to the shared runner
    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/radcal_pwr_stat', SLC_tab, SLC_tab_cal, plist, MSR_cal, PWR_cal, roff, loff, nr, nl, plist_out]
    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)
def radcal_SLC(SLC, SLC_par, CSLC, CSLC_par, fcase='-', antenna='-', rloss_flag='-', ant_flag='-', refarea_flag='-', sc_dB='-', K_dB='-', pix_area='-', logpath=None, outdir=None, shellscript=None):
    """
    | Radiometric calibration of SLC data
    | Copyright 2023, Gamma Remote Sensing, v2.8 6-Jul-2023 uw/clw/of
    Parameters
    ----------
    SLC:
        (input) SLC (FCOMPLEX or SCOMPLEX)
    SLC_par:
        (input) SLC parameter file of input SLC
    CSLC:
        (output) radiometrically calibrated SLC (FCOMPLEX or SCOMPLEX)
    CSLC_par:
        (output) SLC parameter file of output calibrated SLC
    fcase:
        format case (enter - for default)
        * 1: FCOMPLEX --> FCOMPLEX (pairs of FLOAT) (default)
        * 2: FCOMPLEX --> SCOMPLEX (pairs of SHORT INTEGER)
        * 3: SCOMPLEX --> FCOMPLEX
        * 4: SCOMPLEX --> SCOMPLEX
    antenna:
        1-way antenna gain pattern file (enter - for none)
    rloss_flag:
        range spreading loss correction (enter - for default)
        * 0: no correction (default)
        * 1: apply r^3 correction (all modes except ASAR APS)
        * 2: apply r^4 correction (used only for ASAR APS mode)
        * -1: undo r^3 correction
        * -2: undo r^4 correction
    ant_flag:
        antenna pattern correction (enter - for default)
        * 0: no correction (default)
        * 1: apply antenna pattern correction
        * -1: undo antenna pattern correction
    refarea_flag:
        reference pixel area correction (enter - for default)
        * 0: no pixel area correction (default)
        * 1: calculate sigma0, scale area by sin(inc_ang)/sin(ref_inc_ang)
        * 2: calculate gamma0, scale area by sin(inc_ang)/(cos(inc_ang)\\*sin(ref_inc_ang)
        * -1: undo sigma0 area scaling factor
        * -2: undo gamma0 area scaling factor
    sc_dB:
        scale factor in dB (enter - for default: 0.0)
    K_dB:
        calibration factor in dB (enter - for default: value from SLC_par)
    pix_area:
        (output) ellipsoid-based ground range sigma0 or gamma0 pixel reference area (FLOAT) (enter - for none)
        refarea_flag 1 or -1: sigma0 ref. area
        refarea_flag 2 or -2: gamma0 ref. area
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [SLC, SLC_par, CSLC, CSLC_par, fcase, antenna, rloss_flag,
            ant_flag, refarea_flag, sc_dB, K_dB, pix_area]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/radcal_SLC'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def rascc_mask(cc, pwr, width, start_cc='-', start_pwr='-', nlines='-', pixavr='-', pixavaz='-', cc_thres='-', pwr_thres='-', cc_min='-', cc_max='-', scale='-', exp='-', LR='-', rasf='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate phase unwrapping validity mask using correlation and intensity
    | Copyright 2023, Gamma Remote Sensing, v2.2 19-Apr-2023 clw/uw
    Parameters
    ----------
    cc:
        (input) interferometric correlation image (FLOAT)
    pwr:
        (input) intensity image (FLOAT, enter - if not available)
    width:
        number of samples/row
    start_cc:
        starting line of coherence image (enter - for default: 1)
    start_pwr:
        starting line of intensity image (enter - for default: 1)
    nlines:
        number of lines to display (enter - or 0 for default: to end of file)
    pixavr:
        number of pixels to average in range (enter - for default: 1)
    pixavaz:
        number of pixels to average in azimuth (enter - for default: 1)
    cc_thres:
        coherence threshold for masking, pixels with cc < cc_thres are set to 0 (enter - for default: 0.0)
    pwr_thres:
        relative intensity threshold for masking, pixels with intensity < pwr_thres \\* average intensity are set to 0 (enter - for default: 0)
    cc_min:
        minimum coherence value used for color display (enter - for default: 0.1)
    cc_max:
        maximum coherence value used for color display (enter - for default: 0.9)
    scale:
        intensity display scale factor (enter - for default: 1.0)
    exp:
        intensity display exponent (enter - for default: 0.35)
    LR:
        image mirror flag (enter - for default)
        * 1: normal (default)
        * -1: mirror image
    rasf:
        (output) image filename, extension determines the format, enter - for default: \\*.tif
        \\*.bmp BMP format
        \\*.ras Sun raster format
        \\*.tif TIFF format
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # path of the Gamma executable to run
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/rascc_mask'
    process([executable, cc, pwr, width, start_cc, start_pwr, nlines, pixavr, pixavaz,
             cc_thres, pwr_thres, cc_min, cc_max, scale, exp, LR, rasf],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def rascc_mask_thinning(ras_in, in_file, width, ras_out, nmax='-', thresholds='-', logpath=None, outdir=None, shellscript=None):
    """
    | Adaptive sampling reduction for phase unwrapping validity mask
    | Copyright 2023, Gamma Remote Sensing, v1.7 19-Apr-2023 uw/clw
    Parameters
    ----------
    ras_in:
        (input) validity mask (SUN/BMP/TIFF raster format 8-bit image)
    in_file:
        (input) file used for adaptive sampling reduction, e.g. correlation coefficient (float)
    width:
        number of samples/row of in_file
    ras_out:
        (output) validity mask with reduced sampling (8-bit SUN rasterfile or BMP format image)
    nmax:
        number of sampling reduction runs (enter - for default: 3)
    thresholds:
        a list of thresholds sorted from smallest to largest scale sampling reduction
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the positional argument list for the Gamma program
    args = [ras_in, in_file, width, ras_out, nmax, thresholds]
    process(['GAMMA_SOFTWARE-20250625/ISP/bin/rascc_mask_thinning'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def res_map(hgt, gr, data, SLC_par, OFF_par, res_hgt, res_data, nr='-', naz='-', azps_res='-', loff='-', nlines='-', logpath=None, outdir=None, shellscript=None):
    """
    | Slant range to ground range transformation based on interferometric ground-range
    | Copyright 2023, Gamma Remote Sensing, v2.6 18-Apr-2023 clw/uw
    Parameters
    ----------
    hgt:
        (input) height file in slant range geometry
    gr:
        (input) ground range file in slant range geometry
    data:
        (input) data file in slant range geometry (float) (intensity \\*.pwr or correlation \\*.cc)
    SLC_par:
        (input) ISP parameter file of reference SLC
    OFF_par:
        (input) offset/interferogram processing parameters
    res_hgt:
        (output) resampled height file in ground range geometry
    res_data:
        (output) resampled data file in ground range geometry
    nr:
        number of range samples for L.S. estimate (enter - for default: 7, must be odd)
    naz:
        number of azimuth samples for L.S. estimate (enter - for default: 7, must be odd)
    azps_res:
        azimuth output map sample spacing in meters (enter - for default: azimuth spacing)
    loff:
        offset to starting line for height calculations (enter - for default: 0)
    nlines:
        number of lines to calculate (enter - for default: to end of file)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the command line and delegate execution to the shared runner
    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/res_map', hgt, gr, data, SLC_par, OFF_par, res_hgt, res_data, nr, naz, azps_res, loff, nlines]
    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)
def residue(int, flag, width, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Determine interferometric phase unwrapping residues
    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/uw

    Parameters
    ----------
    int:
        (input) interferogram (fcomplex)
    flag:
        (input) flag file (unsigned char)
    width:
        number of samples/row
    xmin:
        offset to starting range pixel (enter - for default: 0)
    xmax:
        offset last range pixel (enter - for default: width-1)
    ymin:
        offset to starting azimuth row (enter - for default: 0)
    ymax:
        offset to last azimuth row (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE: the parameter name 'int' shadows the builtin; it mirrors the
    # Gamma command-line argument name and is kept for keyword compatibility
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/residue'
    args = [int, flag, width, xmin, xmax, ymin, ymax]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def residue_cc(int, flag, width, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Determine interferometric phase unwrapping residues considering low coherence regions
    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/uw/ts

    Parameters
    ----------
    int:
        (input) interferogram (fcomplex)
    flag:
        (input) flag file (unsigned char)
    width:
        number of samples/row
    xmin:
        offset to starting range pixel (enter - for default: 0)
    xmax:
        offset last range pixel (enter - for default: width-1)
    ymin:
        offset to starting azimuth row (enter - for default: 0)
    ymax:
        offset to last azimuth row (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE: the parameter name 'int' shadows the builtin; it mirrors the
    # Gamma command-line argument name and is kept for keyword compatibility
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/residue_cc'
    args = [int, flag, width, xmin, xmax, ymin, ymax]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def RSAT2_vec(SLC_par, RSAT2_orb, nstate='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract Radarsat-2 state vectors from a definitive orbit file
    | Copyright 2022, Gamma Remote Sensing, v1.1 clw/cm 7-Nov-2022

    Parameters
    ----------
    SLC_par:
        (input) ISP image parameter file
    RSAT2_orb:
        Radarsat-2 definitive orbit data file available from MDA (orbit_number_def.orb)
    nstate:
        number of state vectors to extract (enter - for default: 9)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/RSAT2_vec'
    process([prog, SLC_par, RSAT2_orb, nstate],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_burstloc(annotation_XML, logpath=None, outdir=None, shellscript=None):
    """
    | Print Burst information found in the Sentinel-1 annotation file
    | Copyright 2025, Gamma Remote Sensing, v1.4 3-Feb-2025 awi/cm

    Parameters
    ----------
    annotation_XML:
        (input) Sentinel-1 L1 XML annotation file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # single-argument Gamma command; dispatch via the shared process helper
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/S1_burstloc'
    process([prog, annotation_XML],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_ETAD_SLC(ETAD, SLC1_tab, SLC2_tab, OPOD='-', corr='-', phase='-', tropo='-', iono='-', tides='-', bistatic='-', Doppler='-', FM_rate='-', mode='-', order='-', logpath=None, outdir=None, shellscript=None):
    """
    | Read and apply Sentinel-1 Extended Timing Annotation Dataset (ETAD) to correct range and azimuth timings of Sentinel-1 SLC images
    | Copyright 2025, Gamma Remote Sensing, v1.1 25-Jun-2025 cm

    Parameters
    ----------
    ETAD:
        (input) ETAD directory (e.g. S1A_IW_ETA__AXDV_20240807T172347_20240807T172414_055110_06B719_202E.SAFE)
        ETAD can be downloaded from https://dataspace.copernicus.eu/
    SLC1_tab:
        (input) SLC_tab of Sentinel-1 TOPS or Stripmap SLC (e.g. 20240807.SLC_tab)
    SLC2_tab:
        (output) SLC_tab of Sentinel-1 TOPS or Stripmap SLC with ETAD correction (e.g. 20240807.ETAD.SLC_tab)
    OPOD:
        replace state vectors by precision orbit data (OPOD) provided with ETAD data (enter - for default)
        * 0: no
        * 1: yes (default)
    corr:
        apply following timing corrections (enter - for default)
        * 0: no correction
        * 1: all corrections (default)
        * 2: all corrections in range only
        * 3: all corrections in azimuth only
        * 4: select individual corrections (defined in subsequent options)
    phase:
        apply phase corrections corresponding to the selected timing corrections in range (enter - for default)
        * 0: no
        * 1: yes (default)
        * 2: yes, experimental mode (phase corrections written to file(s))
    tropo:
        apply corrections for tropospheric delay in range (enter - for default)
        * 0: no
        * 1: yes (default)
    iono:
        apply corrections for ionospheric delay in range (enter - for default)
        * 0: no
        * 1: yes (default)
    tides:
        apply corrections for solid Earth tides (enter - for default)
        * 0: no
        * 1: yes, in range and azimuth (default)
        * 2: range only
        * 3: azimuth only
    bistatic:
        apply corrections for bistatic azimuth shifts (enter - for default)
        * 0: no
        * 1: yes (default)
    Doppler:
        apply corrections for Doppler-induced range shifts (enter - for default)
        * 0: no
        * 1: yes (default)
    FM_rate:
        apply corrections for FM-rate mismatch azimuth shifts (enter - for default)
        * 0: no
        * 1: yes (default)
    mode:
        complex data interpolation mode (enter - for default)
        * 0: Lanczos (default)
        * 1: B-spline
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 5)
        NOTES: - SLC1_tab and SLC2_tab or their contents can be the same files (the files will be overwritten in that case)
        - if SLC2_tab doesn't exist, it will be automatically created with file names derived from SLC1_tab contents
        - SLC_tab line entries:
        - TOPS mode:     SLC  SLC_par  TOPS_par
        - Stripmap mode: SLC  SLC_par
        - with [phase] = 1, phase corrections only use ionospheric delays and solid Earth tides in range direction
        - with [phase] = 2, phase corrections also include compensation for tropospheric path delays
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call: executable path plus all positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/S1_ETAD_SLC'
    args = [ETAD, SLC1_tab, SLC2_tab, OPOD, corr, phase, tropo,
            iono, tides, bistatic, Doppler, FM_rate, mode, order]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_OPOD_vec(SLC_par, OPOD, nstate='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract Sentinel-1 OPOD state vectors and copy into the ISP image parameter file
    | Copyright 2025, Gamma Remote Sensing, v1.8 23-Jan-2024 awi/clw/cm

    Parameters
    ----------
    SLC_par:
        (input/output) ISP SLC/MLI image parameter file
    OPOD:
        (input) Sentinel-1 OPOD orbit data file (AUX_POEORB or AUX_RESORB)
        orbit files can be downloaded from https://s1qc.asf.alaska.edu/ or https://dataspace.copernicus.eu/
    nstate:
        number of state vectors to extract (enter - for default: include 60 sec extention at the start and end of the SLC data)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/S1_OPOD_vec'
    process([prog, SLC_par, OPOD, nstate],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def sbi_filt(SLC1, SLC1_par, SLC2R_par, SLCf, SLCf_par, SLCb, SLCb_par, norm_sq, iwflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | Azimuth filtering of SLC data to support split-beam interferometry to measure azimuth offsets
    | Copyright 2023, Gamma Remote Sensing, v1.6 clw/cm 18-Apr-2023

    Parameters
    ----------
    SLC1:
        (input) SLC image (SCOMPLEX or FCOMPLEX format)
    SLC1_par:
        (input) SLC image parameter file
    SLC2R_par:
        (input) SLC2 ISP image parameter file for the co-registered image of the interferometric pair,
        used to determine azimuth common-band for each output SLC (enter - for none)
    SLCf:
        (output) SLC image (forward-looking, FCOMPLEX format)
    SLCf_par:
        (output) SLC parameter file (forward-looking)
    SLCb:
        (output) SLC image (backward-looking, FCOMPLEX format)
    SLCb_par:
        (output) SLC parameter file (backward-looking)
    norm_sq:
        squint between beams as a fraction of the azimuth spectrum width (default: 0.5)
    iwflg:
        inverse weighting flag (enter - for default)
        * 0: no compensation for azimuth spectrum weighting
        * 1: compensate for the azimuth spectrum weighting (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/sbi_filt'
    args = [SLC1, SLC1_par, SLC2R_par, SLCf, SLCf_par, SLCb, SLCb_par, norm_sq, iwflg]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def sbi_offset(sbi_unw, SLCf_par, SLCb_par, OFF_par, az_offset, logpath=None, outdir=None, shellscript=None):
    """
    | Calculate azimuth offsets from unwrapped split-beam interferogram
    | Copyright 2022, Gamma Remote Sensing, v1.1 8-Nov-2022

    Parameters
    ----------
    sbi_unw:
        (input) unwrapped phase of split-beam interferogram (float)
    SLCf_par:
        (input) reference SLC parameter file (forward-looking)
    SLCb_par:
        (input) reference SLC parameter file (backward-looking)
    OFF_par:
        (input) offset parameter file
    az_offset:
        (output) azimuth offsets (m)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/sbi_offset'
    process([prog, sbi_unw, SLCf_par, SLCb_par, OFF_par, az_offset],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_burst_copy(SLC, SLC_par, TOPS_par, SLC_out, SLC_out_par, burst_num, drflg='-', SLC_par2='-', dtype='-', logpath=None, outdir=None, shellscript=None):
    """
    | Copy selected burst from Sentinel-1 TOPS SLC to a file
    | Copyright 2023, Gamma Remote Sensing, v2.1 18-Apr-2023 awi/clw/cm

    Parameters
    ----------
    SLC:
        (input) ScanSAR mode burst SLC
    SLC_par:
        (input) SLC parameter file for the ScanSAR burst scene
    TOPS_par:
        (input) burst parameter file for the ScanSAR burst SLC
    SLC_out:
        (output) SLC file containing a single burst
    SLC_out_par:
        (output) SLC parameter file for the single burst
    burst_num:
        burst number of selected burst (1 -> number of bursts in the SLC)
    drflg:
        deramp phase flag (enter - for default)
        * 0: no modification of the burst SLC phase (default)
        * 1: subtract TOPS mode Doppler phase ramp for Sentinel-1 (deramp)
    SLC_par2:
        (output) SLC parameter file for the single burst SLC with deramped phase (drflg: 1, enter - for none)
    dtype:
        output data type (enter - for default: same as input data):
        * 0: FCOMPLEX
        * 1: SCOMPLEX
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_copy'
    args = [SLC, SLC_par, TOPS_par, SLC_out, SLC_out_par,
            burst_num, drflg, SLC_par2, dtype]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_burst_corners(SLC_par, TOPS_par, KML='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate corner geographic coordinates of ScanSAR burst data and generate a KML with burst rectangles
    | Copyright 2025, Gamma Remote Sensing, v1.5 10-Feb-2025 awi/rc/cw

    Parameters
    ----------
    SLC_par:
        (input) SLC parameter file for the ScanSAR burst data
    TOPS_par:
        (input) ScanSAR burst parameter file
    KML:
        (output) KML output file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_corners'
    process([prog, SLC_par, TOPS_par, KML],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_burst_MLI(SLC_tab, MLI_tab, rlks, azlks, bflg='-', SLCR_tab='-', MLI_dir='-', scale='-', logpath=None, outdir=None, shellscript=None):
    """
    | Generate MLI burst data from ScanSAR burst SLC data (Sentinel-1, RCM, and TSX)
    | Copyright 2024, Gamma Remote Sensing v2.5 25-Jun-2024 clw/cm

    Parameters
    ----------
    SLC_tab:
        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range
        SLC_tab line entries: SLC SLC_par TOPS_par
    MLI_tab:
        (output) 3 column list of MLI swaths listed in order from near to far range
        MLI_tab line entries: MLI MLI_par TOPS_par
        * NOTE: if the MLI_tab does not yet exist, the file entries will be created with names derived from the SLC_tab entries
    rlks:
        number of range looks (1...80)
    azlks:
        number of azimuth look (1...20)
    bflg:
        burst window calculation flag (enter - for default)
        * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)
        * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks
    SLCR_tab:
        (input) 3 column list of the reference scene with swaths, listed in order from near to far range (enter - for none)
        SLCR_tab line entries: SLC SLC_par TOPS_par
    MLI_dir:
        directory for output burst MLI data, ignored if the MLI_tab already exists (enter - for default: current directory)
    scale:
        scale factor for output MLI (enter - for default: calculate from calibration gain in SLC parameter file)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_MLI'
    args = [SLC_tab, MLI_tab, rlks, azlks, bflg, SLCR_tab, MLI_dir, scale]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_burst_overlap(SLC_tab, root_name, rlks, azlks, mode='-', bflg='-', SLCR_tab='-', dburst='-', bound='-', logpath=None, outdir=None, shellscript=None):
    """
    | Extract and mosaic overlapping parts of ScanSAR / TOPS burst data
    | Copyright 2023, Gamma Remote Sensing v1.8 18-Apr-2023 cm/clw/uw

    Parameters
    ----------
    SLC_tab:
        (input) 3 column list of SLC, SLC_par, Sentinel-1 TOPS_par sorted in the order IW1, IW2, IW3...
    root_name:
        (output) output data root name (example: yyyymmdd_pp_overlap)
    rlks:
        number of range looks used to determine burst window boundaries
    azlks:
        number of azimuth looks used to determine burst window boundaries
    mode:
        output mode (enter - for default)
        * 0: output data are mosaics, non-overlapping parts are set to 0 (default)
        * 1: output data are mosaics, non-overlapping parts are written
        * 2: output data are burst data containing only overlapping parts
        * 3: output data is a polygon file with polygons encompassing overlapping areas in the SLC mosaic
        * 4: output data is a polygon file with polygons encompassing overlapping areas in the MLI mosaic
    bflg:
        burst window calculation flag (enter - for default)
        * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)
        * 1: recalculate burst window parameters from burst parameters and the number of range and azimuth looks
    SLCR_tab:
        (input) SLC_tab of the reference scene, 3 column list of SLC, SLC_par, TOPS_par sorted sorted in the order IW1, IW2, IW3 (enter - for none)
        * NOTE: When generating a mosaic of a resampled SLC, the SLC_tab of the reference scene is required
    dburst:
        delta burst number (1=overlap of subsequent bursts, enter - for default: 1)
    bound:
        boundary pixels in polygon (enter - for default: 0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_overlap'
    args = [SLC_tab, root_name, rlks, azlks, mode, bflg, SLCR_tab, dburst, bound]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_burst_to_mosaic(DATA_tab, mosaic, MLI_par, mflg='-', data_tab_ref='-', min_ovr='-', max_ovr='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate mosaic of multilook ScanSAR burst data (FLOAT or FCOMPLEX)
    | Copyright 2023, Gamma Remote Sensing v2.5 18-Apr-2023 clw/cm

    Parameters
    ----------
    DATA_tab:
        (input) 3 column list of swaths in ML_DATA burst geometry listed in the order from near to far range
        DATA_tab line entries: DATA MLI_par TOPS_par
        * NOTE: The data type (FLOAT or FCOMPLEX) is specified in the MLI_par and the burst parameters (TOPS_par) must agree
    mosaic:
        (output) mosaic image from bursts in multi-look geometry
    MLI_par:
        (output) mosaic image parameter file
    mflg:
        mosaicking option flag (enter - for default)
        * 0: no overlap between bursts or image swaths (default)
        * 1: average data in the overlap between bursts and in the overlap between image swaths
        * 2: average data in the overlap between bursts but not in the overlap between image swaths
    data_tab_ref:
        (input) reference scene DATA_tab, 3 column list of DATA, MLI_par, TOPS_par listed in order from near to far range (enter - for none)
        * NOTE: When generating a mosaic produced using data from a resampled scene, the MLI_tab of the reference scene is required
    min_ovr:
        minimum number of overlapping bursts (using mflg = 1 or 2, enter - for default: 1)
    max_ovr:
        maximum number of overlapping bursts (using mflg = 1 or 2, enter - for default: unlimited)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_to_mosaic'
    args = [DATA_tab, mosaic, MLI_par, mflg, data_tab_ref, min_ovr, max_ovr]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_full_aperture_SLC(SLC1_tab, SLC2_tab, SLCR_tab='-', SLC2_dir='-', vmode='-', wflg='-', imode='-', order='-', n_ovr='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate continuous SLC data from ScanSAR burst data (Sentinel-1, RCM, and TSX)
    | Copyright 2023, Gamma Remote Sensing v1.9 18-Apr-2023 clw/cm

    Parameters
    ----------
    SLC1_tab:
        (input) 3 column list of ScanSAR SLC swaths listed in order from near to far range
        SLC1_tab line entries: SLC SLC_par TOPS_par
    SLC2_tab:
        (input/output) 3 column list of oversampled continuous SLC swaths listed in order from near to far range
        SLC2_tab line entries: SLC SLC_par
        * NOTE: if the SLC2_tab does not yet exist, the file entries will be created with names derived from the SLC1_tab entries
    SLCR_tab:
        (input) 3 column list of the reference scene with swaths, listed in order from near to far range (enter - for none)
        SLCR_tab line entries: SLC SLC_par TOPS_par
    SLC2_dir:
        directory for output oversampled continuous SLC, ignored if the SLC2_tab already exists (enter - or . for the current directory)
    vmode:
        sample validity mode (enter - for default):
        * 0: all data in the burst are considered valid (default)
        * 1: interpolate samples between the valid data bounds of the burst
    wflg:
        burst window calculation flag (enter - for default):
        * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)
        * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks
    imode:
        interpolation mode (enter - for default):
        * 0: Lanczos (default)
        * 1: B-spline
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 5)
        dtype output data type, (enter - for default: same as input data):
        * 0: FCOMPLEX
        * 1: SCOMPLEX
    n_ovr:
        SLC oversampling factor, must be in the range 2 --> 32 (enter - for default: automatically calculated)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE(review): the Gamma usage text mentions a 'dtype' option that is not
    # exposed by this generated signature; presumably the parser skipped it —
    # verify against the Gamma command usage before relying on it
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_full_aperture_SLC'
    args = [SLC1_tab, SLC2_tab, SLCR_tab, SLC2_dir, vmode, wflg, imode, order, n_ovr]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_mosaic_to_burst(DATA, MLI_par, DATA_tab, logpath=None, outdir=None, shellscript=None):
    """
    | Resample image data in the MLI mosaic geometry to burst MLI geometry (FLOAT or FCOMPLEX)
    | Copyright 2023, Gamma Remote Sensing v1.5 3-Apr-2023 clw/cm

    Parameters
    ----------
    DATA:
        (input) data in mosaic geometry (FLOAT or FCOMPLEX data type)
    MLI_par:
        image parameter file in mosaic geometry
    DATA_tab:
        3 column list of the output data in burst geometry, swaths are in order from near to far range
        MLI_tab line entries: DATA MLI_par TOPS_par
        * NOTE: 1.The burst MLI_par and TOPS_par files describing the output geometry must already exist
        2.The data type (FLOAT or FCOMPLEX) specified in the MLI_par and the burst parameters (TOPS_par) must agree
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_mosaic_to_burst'
    process([prog, DATA, MLI_par, DATA_tab],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def slant_range(SLC_par, slr, logpath=None, outdir=None, shellscript=None):
    """
    | Calculate slant range for every range sample
    | Copyright 2022, Gamma Remote Sensing v1.2 8-Nov-2022 cw

    Parameters
    ----------
    SLC_par:
        (input) SLC or MLI image parameter file
    slr:
        (output) slant range for every sample in the image (float)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/slant_range'
    process([prog, SLC_par, slr],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_adf(SLC, ref_SLC, ref_SLC_par, SLC_filt, mode='-', alpha='-', nfft_r='-', nfft_az='-', r_step='-', az_step='-', mwin_r='-', mwin_az='-', logpath=None, outdir=None, shellscript=None):
    """
    | Adaptive filtering of SLC data based on the local PSD of a reference SLC image
    | Copyright 2023, Gamma Remote Sensing, v1.4 18-Apr-2023 clw/cm

    Parameters
    ----------
    SLC:
        (input) SLC to be filtered (FCOMPLEX or SCOMPLEX)
    ref_SLC:
        (input) reference SLC
    ref_SLC_par:
        (input) reference SLC parameter file
    SLC_filt:
        (output) output filtered SLC using the power spectrum of the reference SLC
    mode:
        SLC filtering mode (enter - for default):
        * 0: 1D range PSD filter
        * 1: 1D azimuth PSD filter
        * 2: 2D range PSD \\* azimuth PSD filter
        * 3: 2D median-filtered PSD filtering (default)
    alpha:
        exponent to apply to PSD value (enter - for default: 0.30)
    nfft_r:
        range filter FFT window size, 2\\*\\*N, 16->1024, (enter - for default: 128)
    nfft_az:
        azimuth filter FFT window size, 2\\*\\*N, 16->1024, (enter - for default: 128)
    r_step:
        range processing step (enter - for default: nfft_r/4)
    az_step:
        azimuth processing step (enter - for default: nfft_az/4)
    mwin_r:
        range median window size for median PSD filtering (enter - for default: 5)
    mwin_az:
        azimuth median window size for median PSD filtering (enter - for default: 5)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_adf'
    args = [SLC, ref_SLC, ref_SLC_par, SLC_filt, mode, alpha,
            nfft_r, nfft_az, r_step, az_step, mwin_r, mwin_az]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_cat(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, SLC3, SLC3_par, dopflg='-', iflg='-', phflg='-', gainflg='-', imode='-', order='-', logpath=None, outdir=None, shellscript=None):
    """
    | Concatenate a pair of SLC images with interpolation of the second scene
    | Copyright 2024, Gamma Remote Sensing, v2.8 18-Jul-2024 clw/cm

    Parameters
    ----------
    SLC1:
        (input) SLC1 image (FCOMPLEX or SCOMPLEX)
    SLC2:
        (input) SLC2 image to be appended to SLC1 (same type as SLC1)
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset parameter file containing offset polynomials between SLC1 and SLC2
    SLC3:
        (output) concatenated SLC
    SLC3_par:
        (output) ISP image parameter file for concatenated image
    dopflg:
        Doppler flag (enter - for default)
        * 0: ignore Doppler centroid information, assume 0 Hz Doppler centroid
        * 1: use Doppler centroid information for interpolation (default)
    iflg:
        input data type flag (enter - for default)
        * 0: input data are SLC images, use data type specified in SLC_par files (SCOMPLEX or FCOMPLEX) (default)
        * 1: input scenes are interferograms, force FCOMPLEX data type
    phflg:
        phase offset correction flag (enter - for default)
        * 0: no phase offset correction for SLC2 (default)
        * 1: apply constant phase offset correction to SLC2
    gainflg:
        gain correction flag (enter - for default)
        * 0: no gain correction for SLC2 (default)
        * 1: apply gain correction to SLC2 using calibration gain values in parameter files
        * 2: apply gain correction to SLC2 using relative intensity of overlap areas
    imode:
        interpolation mode for SLC2 (enter - for default)
        * 0: Lanczos interpolation (default)
        * 1: B-spline interpolation
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_cat'
    args = [SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, SLC3, SLC3_par,
            dopflg, iflg, phflg, gainflg, imode, order]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_cat_ScanSAR(SLC_tab1, SLC_tab2, SLC_tab3, bin_flag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Concatenate sequential ScanSAR burst SLC images
    | Copyright 2024, Gamma Remote Sensing v3.5 5-Mar-2024 clw/cm

    Parameters
    ----------
    SLC_tab1:
        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range (earlier time)
        SLC_tab line entries: SLC SLC_par TOPS_par
    SLC_tab2:
        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range (later time)
        SLC_tab line entries: SLC SLC_par TOPS_par
    SLC_tab3:
        (input) 3 column list of concatenated ScanSAR SLC, swaths are listed in order from near to far range
        SLC_tab line entries: SLC SLC_par TOPS_par
    bin_flag:
        binary data flag (enter - for default)
        * 0: no binary data generated (concatenate parameter files only)
        * 1: binary data generated (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_cat_ScanSAR'
    process([prog, SLC_tab1, SLC_tab2, SLC_tab3, bin_flag],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_copy(SLC_in, SLC_par_in, SLC_out, SLC_par_out, fcase='-', sc='-', roff='-', nr='-', loff='-', nl='-', swap='-', header_lines='-', logpath=None, outdir=None, shellscript=None):
    """
    | Copy SLC with options for data format conversion, segment extraction, swap real and imaginary, swap near and far range, and azimuth spectrum shift
    | Copyright 2023, Gamma Remote Sensing, v6.1 1-May-2023 uw/clw/cm/of

    Parameters
    ----------
    SLC_in:
        (input) SLC (FCOMPLEX or SCOMPLEX format)
    SLC_par_in:
        (input) ISP SLC parameter file for input SLC
    SLC_out:
        (output) selected SLC section (FCOMPLEX or SCOMPLEX format)
    SLC_par_out:
        (output) ISP SLC parameter file of output SLC
    fcase:
        data format conversion (enter - for default: output format = input format)
        * 1: FCOMPLEX --> FCOMPLEX (default sc = 1.0)
        * 2: FCOMPLEX --> SCOMPLEX (default sc = 10000.0)
        * 3: SCOMPLEX --> FCOMPLEX (default sc = 0.0001)
        * 4: SCOMPLEX --> SCOMPLEX (default sc = 1.0)
    sc:
        scale factor for input SLC data (enter - for default)
    roff:
        offset to starting range sample (enter - for default: 0)
    nr:
        number of range samples (enter - for default: to end of line)
    loff:
        offset to starting line (enter - for default: 0)
    nl:
        number of lines to copy (enter - for default: to end of file)
    swap:
        swap data (enter - for default)
        * 0: normal (default)
        * 1: swap real/imaginary part of complex data
        * 2: swap left/right (near/far range)
        * 3: shift the SLC azimuth spectrum by 1/2 the azimuth sample rate
    header_lines:
        number of input file header lines (enter - for default: 0)
        * NOTE: CEOS format SLC data have 1 header line
        * NOTE: file offset pointer size (bytes): 8
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the Gamma call as executable path followed by positional arguments
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_copy'
    args = [SLC_in, SLC_par_in, SLC_out, SLC_par_out, fcase, sc,
            roff, nr, loff, nl, swap, header_lines]
    process([prog] + args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_copy_ScanSAR(SLC1_tab, SLC2_tab, BURST_tab, dtype='-', SLC2_dir='-', logpath=None, outdir=None, shellscript=None):
    """
    | Burst selection and copy from ScanSAR burst data (FCOMPLEX, SCOMPLEX)
    | Copyright 2024, Gamma Remote Sensing v3.7 29-Feb-2024 clw/cm

    Parameters
    ----------
    SLC1_tab:
        (input) 3 column list of ScanSAR SLC1 swaths in order from near to far range
        SLC1_tab line entries: SLC SLC_par TOPS_par
    SLC2_tab:
        (input/output) 3 column list of the burst data copied from the ScanSAR swaths listed in SLC1_tab, in order from near to far range
        SLC2_tab line entries: SLC SLC_par TOPS_par
        * NOTE: If the SLC2_tab does not yet exist, the SLC2_tab will be created with file names derived from the SLC1_tab entries and the SLC2_dir
        The new file names will have _2 appended to the root file names of the entries in SLC1_tab
    BURST_tab:
        (input) 2 column list of the first and last burst to copy from each swath, one line for each swath
        BURST_tab line entries: first_burst last_burst
        NOTES: 1. The first burst is 1, enter - to select last physical burst
        2. If first_burst <= 0, then blank bursts are generated at the start of the output swath
        3. If last_burst exceeds the number of bursts, then blank bursts are appended to the end of the output swath
    dtype:
        output data format for complex data (enter - for default: output data has the same format as input data):
        * 0: FCOMPLEX
        * 1: SCOMPLEX
    SLC2_dir:
        directory for ScanSAR burst data copied from SLC1 data, ignored if the SLC2_tab already exists (enter - for default: current directory)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_copy_ScanSAR'
    process([prog, SLC1_tab, SLC2_tab, BURST_tab, dtype, SLC2_dir],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_corners(SLC_par, terra_alt='-', kml='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate SLC/MLI image corners in geodetic latitude and longitude (deg.)
    | Copyright 2022, Gamma Remote Sensing, v2.2 8-Nov-2022 clw/awi/cm

    Parameters
    ----------
    SLC_par:
        (input) ISP SLC/MLI image parameter file
    terra_alt:
        (input) average terrain altitude (enter - for default: 300.000 meters)
    kml:
        (output) kml output file (enter - for none)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # build and dispatch the Gamma command
    prog = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_corners'
    process([prog, SLC_par, terra_alt, kml],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_deramp(SLC1, SLC1_par, SLC2, SLC2_par, mode, dop_ph='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate and subtract Doppler phase from an SLC image
    | Copyright 2023, Gamma Remote Sensing, v1.7 18-Apr-2023 clw

    Parameters
    ----------
    SLC1:
        (input) SLC data file (FCOMPLEX or SCOMPLEX format)
    SLC1_par:
        (input) SLC parameter file with Doppler information
    SLC2:
        (output) SLC with Doppler phase removed (or added)
    SLC2_par:
        (output) SLC parameter file for the output SLC
    mode:
        mode of operation:
            * 0: subtract Doppler phase ramp (deramp)
            * 1: add Doppler phase ramp (reramp)
    dop_ph:
        (output) Doppler phase (FLOAT) (enter - for none)
        Note: SLC1_par contains the Doppler polynomial that is used to calculate the Doppler phase ramp
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_deramp'
    process([executable, SLC1, SLC1_par, SLC2, SLC2_par, mode, dop_ph],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_deramp_ScanSAR(SLC1_tab, SLC2_tab, mode, phflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate and subtract ScanSAR or TOPS Doppler phase from burst SLC data
    | Copyright 2023, Gamma Remote Sensing, v2.1 18-Apr-2023 clw/cm

    Parameters
    ----------
    SLC1_tab:
        (input) 3 column list of input ScanSAR SLC, swaths are listed in order from near to far range:
        SLC_tab line entries: SLC SLC_par TOPS_par
    SLC2_tab:
        (input) 3 column list of output ScanSAR SLC, swaths are listed in order from near to far range
    mode:
        mode of operation:
            * 0: subtract ScanSAR Doppler phase (deramp)
            * 1: add Doppler phase ramp (reramp)
    phflg:
        deramp phase flag (enter - for default)
            * 0: do not save ScanSAR Doppler phase (default)
            * 1: save ScanSAR Doppler phase, output filename is the same as the deramped SLC with extension .dph
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_deramp_ScanSAR'
    process([executable, SLC1_tab, SLC2_tab, mode, phflg],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_deskew(SLC1, SLC1_par, SLC2, SLC2_par, mode='-', interp='-', order='-', deramp='-', ph_corr='-', sr0='-', sr2='-', logpath=None, outdir=None, shellscript=None):
    """
    | Change geometry from Doppler centroid to zero-Doppler (deskew) or vice-versa
    | Copyright 2024, Gamma Remote Sensing, v1.6 17-Oct-2024 cm/clw/uw

    Parameters
    ----------
    SLC1:
        (input) SLC image file (FCOMPLEX or SCOMPLEX format)
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2:
        (output) SLC image file in new geometry
    SLC2_par:
        (output) SLC2 ISP image parameter file
    mode:
        mode of operation (enter - for default)
            * 0: change geometry from Doppler centroid to zero-Doppler (deskew, default)
            * 1: change geometry from zero-Doppler to Doppler centroid (reskew)
    interp:
        interpolation method (enter - for default)
            * 0: Lanczos interpolation (default)
            * 1: B-spline interpolation
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)
    deramp:
        deramp flag (enter - for default)
            * 0: do not deramp and reramp data
            * 1: deramp data before interpolation and reramp afterwards (default)
    ph_corr:
        range shift phase correction flag (enter - for default)
            * 0: do not correct phase related to range shift
            * 1: correct phase related to range shift (default)
    sr0:
        near range distance of the resampled image in meter (enter - for default: calculated from input)
    sr2:
        far range distance of the resampled image in meter (enter - for default: calculated from input)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_deskew'
    process([executable, SLC1, SLC1_par, SLC2, SLC2_par, mode, interp,
             order, deramp, ph_corr, sr0, sr2],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_freq_shift(SLC, SLC_par, SLC_shift, SLC_shift_par, freq_shift, logpath=None, outdir=None, shellscript=None):
    """
    | ISP Program GAMMA_SOFTWARE-20250625/ISP/bin/SLC_freq_shift
    | Shift the effective radar carrier frequency of an SLC image by a specified amount
    | Copyright 2022, Gamma Remote Sensing, v1.1 8-Nov-2022 clw

    Parameters
    ----------
    SLC:
        (input) SLC file (FCOMPLEX or SCOMPLEX)
    SLC_par:
        (input) SLC parameter file
    SLC_shift:
        (output) SLC data with shifted radar carrier frequency
    SLC_shift_par:
        (output) SLC parameter file with shifted radar carrier frequency
    freq_shift:
        radar carrier frequency shift (Hz)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_freq_shift'
    process([executable, SLC, SLC_par, SLC_shift, SLC_shift_par, freq_shift],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_interp(SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R, SLC2R_par, loff='-', nlines='-', mode='-', order='-', logpath=None, outdir=None, shellscript=None):
    """
    | SLC complex image resampling using 2-D Lanczos or B-spline interpolation
    | Copyright 2023, Gamma Remote Sensing, v4.9 18-Apr-2023 clw/cm

    Parameters
    ----------
    SLC2:
        (input) SLC2 image to be resampled to the geometry of the SLC1 reference image
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    SLC2R:
        (output) single-look complex image 2 coregistered to SLC1
    SLC2R_par:
        (output) SLC2R ISP image parameter file for coregistered image
    loff:
        offset to first valid output line (in SLC1 lines) (enter - for default: 0)
    nlines:
        number of valid output lines (enter - or 0 for default: to end of file)
    mode:
        interpolation mode (enter - for default)
            * 0: Lanczos (default)
            * 1: B-spline
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_interp'
    process([executable, SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R,
             SLC2R_par, loff, nlines, mode, order],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_interp_map(SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R, SLC2R_par, OFF_par2, coffs_sm, loff='-', nlines='-', mode='-', order='-', logpath=None, outdir=None, shellscript=None):
    """
    | SLC image resampling using a 2-D offset map
    | Copyright 2024, Gamma Remote Sensing, v4.3 22-Aug-2024 clw/uw/cm

    Parameters
    ----------
    SLC2:
        (input) SLC2 image to be resampled to the reference SLC1 reference image
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2_par:
        (input) SLC2 ISP image parameter file
    OFF_par:
        (input) ISP offset/interferogram parameter file
    SLC2R:
        (output) single-look complex image 2 coregistered to SLC1
    SLC2R_par:
        (output) SLC2R ISP image parameter file for co-registered image
    OFF_par2:
        (input) ISP offset/interferogram parameter file used for residual offsets map (coffs_sm)
    coffs_sm:
        (input) smoothed residual range and azimuth offsets (fcomplex)
    loff:
        offset to first valid output line (in SLC1 lines) (enter - for default: 0)
    nlines:
        number of valid output lines (enter - or 0 for default: to end of file)
    mode:
        interpolation mode (enter - for default)
            * 0: Lanczos (default)
            * 1: B-spline
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_interp_map'
    process([executable, SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R,
             SLC2R_par, OFF_par2, coffs_sm, loff, nlines, mode, order],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_interp_ScanSAR(SLC2_tab, SLC2_par, SLC1_tab, SLC1_par, OFF_par, SLC2R_tab, SLC2R='-', SLC2R_par='-', mode='-', order='-', SLC2R_dir='-', burst_check='-', logpath=None, outdir=None, shellscript=None):
    """
    | Resample ScanSAR burst mode SLC using global offset polynomial
    | Copyright 2025, Gamma Remote Sensing v4.4 28-Jan-2025 clw/cm

    Parameters
    ----------
    SLC2_tab:
        (input) 3 column list of ScanSAR SLC2 swaths to be resampled into the geometry of SLC1 listed in order from near to far range
        SLC2_tab line entries: SLC SLC_par TOPS_par
    SLC2_par:
        (input) SLC parameter file of ScanSAR SLC2 mosaic, SLC2 is generated from the ScanSAR swaths listed in SLC2_tab
    SLC1_tab:
        (input) 3 column list of the reference ScanSAR SLC swaths listed in order from near to far range
    SLC1_par:
        (input) SLC parameter file of the reference ScanSAR SLC1 mosaic, SLC1 is generated from the ScanSAR swaths listed in SLC1_tab
    OFF_par:
        (input) global ISP offset and interferogram parameter file, the offset model is determined from the ScanSAR SLC mosaics
        * NOTE: The OFF_par specifies the number of range and azimuth looks required to determine valid data bounds (burst windows)
    SLC2R_tab:
        (input/output) 3 column list of the resampled ScanSAR SLC2 swaths listed in order from near to far range
        * NOTE: If the SLC2R_tab does not yet exist, the entries will be created with file names derived from the filenames in SLC2_tab and the SLC2R_dir
        The file extensions of the new entries are changed from slc to rslc
    SLC2R:
        (output) mosaic generated from the resampled swaths listed in SLC2R_tab, coregistered to the reference mosaic of SLC1 (enter - for none)
    SLC2R_par:
        (output) SLC parameter file associated with the mosaic created from the resampled swaths SLC2R (enter - for none)
    mode:
        complex data interpolation mode (enter - for default)
            * 0: Lanczos (default)
            * 1: B-spline
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)
    SLC2R_dir:
        directory for resampled burst SLC2R data, ignored if the DIFF_tab already exists (enter - for default: current directory)
    burst_check:
        check and update burst parameters to match actual data (enter - for default)
            * 0: no (default)
            * 1: yes
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_interp_ScanSAR'
    process([executable, SLC2_tab, SLC2_par, SLC1_tab, SLC1_par, OFF_par,
             SLC2R_tab, SLC2R, SLC2R_par, mode, order, SLC2R_dir, burst_check],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_intf(SLC1, SLC2R, SLC1_par, SLC2R_par, OFF_par, interf, rlks, azlks, loff='-', nlines='-', sps_flg='-', azf_flg='-', rp1_flg='-', rp2_flg='-', SLC1s='-', SLC2Rs='-', SLC_1s_par='-', SLC_2Rs_par='-', az_beta='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate interferogram from co-registered SLC image data
    | Copyright 2024, Gamma Remote Sensing, v6.3 8-Mar-2024 clw/uw/cm

    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2R:
        (input) single-look complex image 2 coregistered to SLC1
    SLC1_par:
        (input) SLC1 ISP image parameter file
    SLC2R_par:
        (input) SLC2R ISP image parameter file for the co-registered image
    OFF_par:
        (input) ISP offset/interferogram parameter file
    interf:
        (output) interferogram from SLC1 and SLC2R
    rlks:
        number of range looks
    azlks:
        number of azimuth looks
    loff:
        offset to starting line relative to SLC1 for interferogram (enter - for default: 0)
    nlines:
        number of SLC lines to process (enter - for default: to end of file)
    sps_flg:
        range spectral shift flag (enter - for default)
            * 1: apply range spectral shift filter (default)
            * 0: do not apply range spectral shift filter
    azf_flg:
        azimuth common band filter flag (enter - for default)
            * 1: apply azimuth common-band filter (default)
            * 0: do not apply azimuth common band filter
    rp1_flg:
        SLC1 image range phase mode (enter - for default)
            * 0: non-zero Doppler geometry
            * 1: zero-Doppler geometry (default)
    rp2_flg:
        SLC2 image range phase mode (enter - for default)
            * 0: non-zero Doppler geometry
            * 1: zero-Doppler geometry (default)
    SLC1s:
        SLC1 after range spectral shift and azimuth common-band filtering (FCOMPLEX format) (enter - for none)
    SLC2Rs:
        SLC2R after range spectral shift and azimuth common-band filtering (FCOMPLEX format) (enter - for none)
    SLC_1s_par:
        SLC1s ISP image parameter file (enter - for none)
    SLC_2Rs_par:
        SLC2Rs ISP image parameter file (enter - for none)
    az_beta:
        azimuth common-band filter Kaiser window parameter (enter - for default: 2.120)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_intf'
    process([executable, SLC1, SLC2R, SLC1_par, SLC2R_par, OFF_par, interf,
             rlks, azlks, loff, nlines, sps_flg, azf_flg, rp1_flg, rp2_flg,
             SLC1s, SLC2Rs, SLC_1s_par, SLC_2Rs_par, az_beta],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_intf2(SLC1, SLC2R, SLC1_par, SLC2R_par, MLI1, MLI2R, MLI1_par, MLI2R_par, interf, cc, r_dec, az_dec, rwin='-', azwin='-', wflg='-', n_ovr='-', sim_phase='-', lanczos='-', beta='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate interferogram and MLI images from SLCs with separate averaging window dimensions and decimation factors
    | Copyright 2025, Gamma Remote Sensing, v2.3 20-May-2025 clw/cm/of

    Parameters
    ----------
    SLC1:
        (input) single-look complex image 1 (reference)
    SLC2R:
        (input) single-look complex image 2 coregistered to SLC1
    SLC1_par:
        (input) SLC1 image parameter file
    SLC2R_par:
        (input) SLC2R image parameter file for the co-registered image
    MLI1:
        (output) multi-look intensity image derived from SLC1 (enter - for none)
    MLI2R:
        (output) multi-look intensity image derived from SLC2R (enter - for none)
    MLI1_par:
        (output) MLI image parameter file derived from SLC1_par (enter - for none)
    MLI2R_par:
        (output) MLI image parameter file derived from SLC2R_par (enter - for none)
    interf:
        (output) complex interferogram from SLC1 and SLC2R (enter - for none)
    cc:
        (output) interferometric correlation magnitude of SLC1 and SLC2R (enter - for none)
    r_dec:
        range decimation factor (int)
    az_dec:
        azimuth decimation factor (int)
    rwin:
        averaging window width (int) (enter - for default: r_dec)
    azwin:
        averaging window height (int) (enter - for default: az_dec)
    wflg:
        window weighting function (enter - for default):
            * 0: rectangular (default)
            * 1: Kaiser
            * 2: circular Gaussian
    n_ovr:
        oversampling factor 1 -> 2 (enter - for default: 1)
    sim_phase:
        (input) simulated interferometric phase, coregistered MLI1 (FLOAT, enter - for none)
    lanczos:
        Lanczos interpolator order 5 -> 9 (enter - for default: 7)
    beta:
        Gaussian or Kaiser window parameter (enter - for default: 2.0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_intf2'
    process([executable, SLC1, SLC2R, SLC1_par, SLC2R_par, MLI1, MLI2R,
             MLI1_par, MLI2R_par, interf, cc, r_dec, az_dec, rwin, azwin,
             wflg, n_ovr, sim_phase, lanczos, beta],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_mosaic_range(SLC_tab, SLC, SLC_par, mode='-', order='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate mosaic of Stripmap SLC data provided in multiple pieces in range direction (e.g. PALSAR-3)
    | Copyright 2025, Gamma Remote Sensing v1.1 5-Feb-2025 cm/clw/uw

    Parameters
    ----------
    SLC_tab:
        (input) 2 column list of Stripmap SLC pieces (from near to far range)
        SLC_tab line entries: SLC SLC_par
    SLC:
        (output) SLC mosaic image
    SLC_par:
        (output) SLC mosaic image parameter file
    mode:
        complex data interpolation mode in range (enter - for default)
            * 0: Lanczos (default)
            * 1: B-spline
            * 2: nearest neighbor
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_mosaic_range'
    process([executable, SLC_tab, SLC, SLC_par, mode, order],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_mosaic_ScanSAR(SLC_tab, SLC, SLC_par, rlks, azlks, bflg='-', SLCR_tab='-', logpath=None, outdir=None, shellscript=None):
    """
    | Calculate SLC mosaic of ScanSAR SLC burst data (Sentinel-1, TerraSAR-X, RCM...)
    | Copyright 2025, Gamma Remote Sensing v5.0 14-Jan-2025 clw/awi/cm

    Parameters
    ----------
    SLC_tab:
        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range
        SLC_tab line entries: SLC SLC_par TOPS_par
    SLC:
        (output) SLC mosaic image
    SLC_par:
        (output) SLC mosaic image parameter file
    rlks:
        number of range looks used to determine burst window boundaries for the mosaic
    azlks:
        number of azimuth looks used to determine burst window boundaries for the mosaic
    bflg:
        burst window calculation flag (enter - for default)
            * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)
            * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks
    SLCR_tab:
        (input) 3 column list of the reference scene, swaths are listed in order from near to far range (enter - for none)
        SLCR_tab line entries: SLC SLC_par TOPS_par
        * NOTE: When generating a mosaic of a resampled SLC, the SLC_tab of the reference scene is required
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_mosaic_ScanSAR'
    process([executable, SLC_tab, SLC, SLC_par, rlks, azlks, bflg, SLCR_tab],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_ovr(SLC, SLC_par, SLC_ovr, SLC_ovr_par, r_ovr='-', az_ovr='-', mode='-', order='-', deramp='-', logpath=None, outdir=None, shellscript=None):
    """
    | Oversample SLC data in range and azimuth using 2-D Lanczos or B-spline interpolation
    | Copyright 2024, Gamma Remote Sensing, v1.6 1-Feb-2024 clw/cm

    Parameters
    ----------
    SLC:
        (input) SLC image (FCOMPLEX or SCOMPLEX format)
    SLC_par:
        (input) SLC image parameter file
    SLC_ovr:
        (output) oversampled SLC image
    SLC_ovr_par:
        (output) oversampled SLC image parameter file
    r_ovr:
        range oversampling factor (enter - for default: 1.0)
    az_ovr:
        azimuth oversampling factor (enter - for default: 1.0)
    mode:
        interpolation mode (enter - for default)
            * 0: Lanczos interpolation (default)
            * 1: B-spline interpolation
    order:
        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)
    deramp:
        deramp flag (enter - for default)
            * 0: do not deramp and reramp data
            * 1: deramp data before interpolation and reramp afterwards (default)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_ovr'
    process([executable, SLC, SLC_par, SLC_ovr, SLC_ovr_par, r_ovr, az_ovr,
             mode, order, deramp],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_phase_shift(SLC1, SLC1_par, SLC2, SLC2_par, ph_shift, logpath=None, outdir=None, shellscript=None):
    """
    | Add a constant phase to an SLC image
    | Copyright 2023, Gamma Remote Sensing, v1.3 24-Apr-2023 clw

    Parameters
    ----------
    SLC1:
        (input) SLC data file (fcomplex or scomplex format)
    SLC1_par:
        (input) SLC parameter file
    SLC2:
        (output) SLC with phase shift
    SLC2_par:
        (output) SLC parameter file for the output SLC
    ph_shift:
        phase shift to add to SLC phase (radians)
        * NOTE: Used to apply a constant phase shift of -1.25 radians to Sentinel-1 TOPS SLC data
          from swath IW1 acquired up to 10-Mar-2015.
          Used to apply a constant phase shift of -3.83 radians to Sentinel-1 TOPS SLC data with
          H-POL on receive (e.g. VH) acquired up to 10-Mar-2015.
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_phase_shift'
    process([executable, SLC1, SLC1_par, SLC2, SLC2_par, ph_shift],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_RFI_filt(SLC, SLC_par, SLC_filt, rfi_thres='-', nfft_r='-', nfft_az='-', r_step='-', az_step='-', mwin_r='-', mwin_az='-', logpath=None, outdir=None, shellscript=None):
    """
    | Adaptive RFI filtering for SLC image using median spectral filtering
    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw

    Parameters
    ----------
    SLC:
        (input) SLC to be filtered (FCOMPLEX or SCOMPLEX)
    SLC_par:
        (input) reference SLC parameter file
    SLC_filt:
        (output) output filtered SLC using the power spectrum of the reference SLC
    rfi_thres:
        RFI threshold (enter - for default: 10.00)
    nfft_r:
        range filter FFT window size, 2\\*\\*N, 16->1024, (enter - for default: 128)
    nfft_az:
        azimuth filter FFT window size, 2\\*\\*N, 16->1024, (enter - for default: 128)
    r_step:
        range processing step (enter - for default: nfft_r/4)
    az_step:
        azimuth processing step (enter - for default: nfft_az/4)
    mwin_r:
        range median window size for median PSD filtering (enter - for default: 5)
    mwin_az:
        azimuth median window size for median PSD filtering (enter - for default: 5)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_RFI_filt'
    process([executable, SLC, SLC_par, SLC_filt, rfi_thres, nfft_r, nfft_az,
             r_step, az_step, mwin_r, mwin_az],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_RFI_filt2(SLC, SLC_par, SLC_filt, rfi_thres='-', method='-', f_bs='-', bs_width='-', roff='-', nr='-', azoff='-', naz='-', pltflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | RFI filtering for SLC image using a band-stop filter
    | Copyright 2024, Gamma Remote Sensing, v1.5 2-Feb-2024 cm

    Parameters
    ----------
    SLC:
        (input) SLC to be filtered (FCOMPLEX or SCOMPLEX)
    SLC_par:
        (input) reference SLC parameter file
    SLC_filt:
        (output) output filtered SLC (same format as SLC)
    rfi_thres:
        RFI threshold in dB above reference (enter - for default: auto)
    method:
        RFI detection method (enter - for default)
            * 0: threshold above median
            * 1: threshold using spectrum symmetry (default)
    f_bs:
        center or seed frequency of band-stop filter in Hz (-fadc/2.0 <= f_bs < fadc/2.0, enter - for default: auto)
    bs_width:
        width of band-stop filter in Hz (enter - for default: auto)
    roff:
        offset to starting range sample to filter (enter - for default: 0)
    nr:
        number of range samples to filter (enter - for default: to end of line)
    azoff:
        offset to starting azimuth line to filter (enter - for default: 0)
    naz:
        number of azimuth lines to filter (enter - for default: to end of file)
    pltflg:
        range spectrum plotting flag (enter - for default)
            * 0: none
            * 1: output plot in PNG format (default)
            * 2: screen output plot
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SLC_RFI_filt2'
    process([executable, SLC, SLC_par, SLC_filt, rfi_thres, method, f_bs,
             bs_width, roff, nr, azoff, naz, pltflg],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def split_WB(data_in, data_par_in, data_tab, dtype, logpath=None, outdir=None, shellscript=None):
    """
    | ISP: Program GAMMA_SOFTWARE-20250625/ISP/bin/split_WB
    | Split WB mosaic image into individual beams using ISP parameter files
    | Copyright 2022, Gamma Remote Sensing, v1.4 8-Nov-2022 clw/cm

    Parameters
    ----------
    data_in:
        (input) input mosaicked data in slant-range geometry (e.g. DEM data)
    data_par_in:
        (input) ISP image parameter file for data in the input mosaic
    data_tab:
        (input) 2 column list of output data filenames and ISP image parameter files for each beam in the mosaic (text)
    dtype:
        (input) input data type:
            * 0: FLOAT
            * 1: FCOMPLEX
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/split_WB'
    process([executable, data_in, data_par_in, data_tab, dtype],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SR_to_GRD(MLI_par, OFF_par, GRD_par, in_file, out_file, rlks='-', azlks='-', interp_mode='-', grd_rsp='-', grd_azsp='-', degree='-', logpath=None, outdir=None, shellscript=None):
    """
    | Conversion to ground range for ISP MLI and INSAR data of type FLOAT
    | Copyright 2023, Gamma Remote Sensing, v2.5 18-Apr-2023 uw/clw/cm

    Parameters
    ----------
    MLI_par:
        (input) MLI image parameter file of the slant-range image
    OFF_par:
        (input) ISP OFF_par of the input image (enter - when the image geometry specified by the MLI_par)
    GRD_par:
        (input/output) image parameter file of output ground range image
    in_file:
        (input) slant range image (FLOAT)
    out_file:
        (output) ground range image (FLOAT)
    rlks:
        multi-looking in range (prior to resampling, enter - for default: 1)
    azlks:
        multi-looking in azimuth (prior to resampling, enter - for default: 1)
    interp_mode:
        interpolation mode (enter - for default)
            * 0: nearest-neighbor
            * 1: bicubic spline
            * 2: bicubic spline log(x)
            * 3: bicubic spline sqrt(x)
            * 4: B-spline interpolation (default B-spline degree: 3)
            * 5: B-spline interpolation sqrt(x) (default) (default B-spline degree: 3)
            * NOTE: log and sqrt interpolation modes should only be used with non-negative data!
    grd_rsp:
        output image ground range sample spacing (m) (enter - for default: (input image azimuth spacing) \\* azlks)
    grd_azsp:
        output image azimuth sample spacing (m) (enter - for default: (input image azimuth spacing) \\* azlks)
    degree:
        B-spline degree (2->9) (enter - for default: 3)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/SR_to_GRD'
    process([executable, MLI_par, OFF_par, GRD_par, in_file, out_file, rlks,
             azlks, interp_mode, grd_rsp, grd_azsp, degree],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def subtract_phase(interf_in, phase_file, interf_out, width, factor='-', logpath=None, outdir=None, shellscript=None):
    """
    | ISP: Program GAMMA_SOFTWARE-20250625/ISP/bin/subtract_phase
    | Subtract scaled phase image from a complex interferogram
    | Copyright 2023, Gamma Remote Sensing, v3.3 19-Apr-2023 uw/clw

    Parameters
    ----------
    interf_in:
        (input) input interferogram (FCOMPLEX)
    phase_file:
        (input) unwrapped interferometric phase (FLOAT)
    interf_out:
        (output) output interferogram (input interferogram - scaled phase) (FCOMPLEX)
    width:
        number of samples/line
    factor:
        constant scale factor for input phase data (enter - for default: 1.0)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/subtract_phase'
    process([executable, interf_in, phase_file, interf_out, width, factor],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def tree_cc(flag, width, mbl='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Phase unwrapping tree generation with low correlation search (modified ARW algorithm)
    | Copyright 2023, Gamma Remote Sensing, v3.1 18-Apr-2023 clw/uw

    Parameters
    ----------
    flag:
        (input) phase unwrapping flag file
    width:
        number of samples/row
    mbl:
        maximum branch length (enter - for default: 32, maximum=64)
    xmin:
        starting range pixel offset (enter - for default: 0)
    xmax:
        last range pixel offset (enter - for default: width-1)
    ymin:
        starting azimuth row, relative to start (enter - for default: 0)
    ymax:
        last azimuth row, relative to start (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/tree_cc'
    process([executable, flag, width, mbl, xmin, xmax, ymin, ymax],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def tree_gzw(flag, width, mbl='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
    """
    | Phase unwrapping tree generation (GZW algorithm)
    | Copyright 2023, Gamma Remote Sensing, v3.8 18-Apr-2023 clw/uw

    Parameters
    ----------
    flag:
        (input) phase unwrapping flag file
    width:
        number of samples/row
    mbl:
        maximum branch length (enter - for default: 32)
    xmin:
        starting range pixel offset (enter - for default: 0)
    xmax:
        last range pixel offset (enter - for default: width-1)
    ymin:
        starting azimuth row, relative to start (enter - for default: 0)
    ymax:
        last azimuth row, relative to start (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/tree_gzw'
    process([executable, flag, width, mbl, xmin, xmax, ymin, ymax],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def unw_model(interf, unw_model, unw, width, xinit='-', yinit='-', ref_ph='-', width_model='-', logpath=None, outdir=None, shellscript=None):
    """
    | Phase unwrapping using a model of the unwrapped phase
    | Copyright 2023, Gamma Remote Sensing, v1.9 21-Sep-2023 clw/uw

    Parameters
    ----------
    interf:
        (input) complex interferogram
    unw_model:
        (input) approximate unwrapped phase model (float)
    unw:
        (output) unwrapped phase (float)
    width:
        number of samples/row of the interferogram
    xinit:
        offset to phase reference location in range (col) (enter - for default: 0)
    yinit:
        offset to phase reference location in azimuth (row) (enter - for default: 0)
    ref_ph:
        reference point phase (radians) (enter - for phase at the reference point)
    width_model:
        number of samples/row of the unwrapped phase model (enter - for default: interferogram width)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/bin/unw_model'
    process([executable, interf, unw_model, unw, width, xinit, yinit,
             ref_ph, width_model],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def bpf_ssi(SLC, SLC_par, SLC_flow, SLC_flow_par, SLC_fhigh, SLC_fhigh_par, rbs='-', logpath=None, outdir=None, shellscript=None):
    """
    | bpf_ssi: Apply band-pass filtering for split-spectrum interferometry
    | Copyright 2023 Gamma Remote Sensing, v1.4 19-Apr-2023 uw/cm

    Parameters
    ----------
    SLC:
        (input) SLC (FCOMPLEX or SCOMPLEX, SLC should not be resampled)
    SLC_par:
        (input) SLC parameter file
    SLC_flow:
        (output) low frequency band filtered SLC (FCOMPLEX or SCOMPLEX)
    SLC_flow_par:
        (output) low frequency band filtered SLC parameter file
    SLC_fhigh:
        (output) high frequency band filtered SLC (FCOMPLEX or SCOMPLEX)
    SLC_fhigh_par:
        (output) high frequency band filtered SLC parameter file (FCOMPLEX or SCOMPLEX)
    rbs:
        relative range spectrum band separation (enter - for default: 0.6666 --> lowest and highest third of processing bandwidth)
        indicate - for the output files to only calculate filtering parameters
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    executable = 'GAMMA_SOFTWARE-20250625/ISP/scripts/bpf_ssi'
    process([executable, SLC, SLC_par, SLC_flow, SLC_flow_par, SLC_fhigh,
             SLC_fhigh_par, rbs],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def bpf_ssi_S1(SLC_tab, SLC_tab_flow, SLC_tab_high, rbs='-', logpath=None, outdir=None, shellscript=None):
    """
    | bpf_ssi_S1: Apply band-pass filtering for split-spectrum interferometry for S1 TOPS data
    | Copyright 2023 Gamma Remote Sensing, v1.2 19-Apr-2023 uw/cm
    Parameters
    ----------
    SLC_tab:
        (input) SLC_tab
    SLC_tab_flow:
        (output) output SLC_tab filename for low frequency band filtered SLC
    SLC_tab_high:
        (output) output SLC_tab filename for high frequency band filtered SLC
    rbs:
        relative range spectrum band separation (enter - for default: 0.6666 --> lowest and highest third of processing bandwidth)
        indicate - for the output files to only calculate filtering parameters
        The filenames in SLC_tab_flow and SLC_tab_high are automatically generated by adding .flow and .fhigh
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # delegate execution to the shared process helper
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/bpf_ssi_S1', SLC_tab, SLC_tab_flow, SLC_tab_high, rbs],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def get_GAMMA_RASTER(mode, logpath=None, outdir=None, shellscript=None):
    """
    | Script to determine the default extension for raster images or the operating system type
    | Copyright 2019 Gamma Remote Sensing, v1.3 1-Apr-2019 clw/uw/cm
    Parameters
    ----------
    mode:
        Specify the script string output:
        * 0: raster file extension (ras, bmp, or tif)
        * 1: OS type: Linux, MINGW64_NT-10.0, CYGWIN_NT-10.0, darwin...
        * NOTE: The default raster format on Linux systems is SUN_RASTER (\\*.ras), for all other operating systems it is BMP (\\*.bmp).
        SUN_RASTER and BMP images are limited in size to 32767 x 32767. TIFF files do not have this limitation.
        To set the default image raster format for Gamma programs, set the environment variable GAMMA_RASTER as follows:
        bash:
        export GAMMA_RASTER=SUN_RASTER    #extension: ras
        export GAMMA_RASTER=BMP           #extension: bmp
        export GAMMA_RASTER=TIFF          #extension: tif
        csh,tcsh:
        setenv GAMMA_RASTER SUN_RASTER    #extension: ras
        setenv GAMMA_RASTER BMP           #extension: bmp
        setenv GAMMA_RASTER TIFF          #extension: tif
        Environment variables can be set either in processing scripts, or in the shell initialization file (e.g. .bashrc)
        Programs in the Gamma software that generate raster image files query the value of GAMMA_RASTER if it has been defined.
        This script can be called from within another script to determine the default raster image format or OS type:
        bash: $ext=`get_GAMMA_RASTER 0`
        csh,tcsh: set ext=`get_GAMMA_RASTER 0`
        The variable $ext can then be used to specify the format of the output raster file by using it to construct
        the output file name:
        bash: $my_raster=$my_name"."$ext
        csh/tcsh: set my_raster=$my_name"."$ext
        OS: Linux
        GAMMA_RASTER value: TIFF
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # the script takes a single positional argument selecting the output string
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/get_GAMMA_RASTER', mode],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def INTF_SLC(pass1, pass2, rlks, azlks, algorithm='-', cc_win='-', r_pos='-', az_pos='-', logpath=None, outdir=None, shellscript=None):
    """
    | INTF_SLC: calculate interferogram, co-registered SLC, intensity images, and correlation
    | Copyright 2023 Gamma Remote Sensing, v1.2 18-Apr-2023 clw/uw/cm
    Parameters
    ----------
    pass1:
        pass 1 identifier (example: pass number) reference
    pass2:
        pass 2 identifier (example: pass number)
    rlks:
        number of range looks
    azlks:
        number of azimuth looks
    algorithm:
        algorithm used to determine offsets (enter - for default)
        * 1: intensity image cross correlation (default)
        * 2: fringe visibility
    cc_win:
        window used for estimation of the correlation coefficient (enter - for default: 3)
    r_pos:
        range position of center of image patch for initial offset (enter - for default: image center)
    az_pos:
        azimuth position of center of image patch for initial offset (enter - for default: image center)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [pass1, pass2, rlks, azlks, algorithm, cc_win, r_pos, az_pos]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/INTF_SLC'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ionosphere_check(SLC, par, rwin='-', azwin='-', thresh='-', rstep='-', azstep='-', cleaning='-', use_existing='-', logpath=None, outdir=None, shellscript=None):
    """
    | ionosphere_check: Determine azimuth spectrum sub-band range and azimuth offsets of a single SLC
    | Significant non-zero azimuth offsets are a clear indication for the presence of ionospheric effects
    | Copyright 2024 Gamma Remote Sensing, v1.8 11-Dec-2024 uw/cm
    Parameters
    ----------
    SLC:
        (input) SLC image (e.g. 20070214.slc)
    par:
        (input) SLC parameter file (e.g. 20070214.slc.par)
    rwin:
        range window size used in offset estimation (enter - for default: 256)
    azwin:
        azimuth window size used in offset estimation (enter - for default: 256)
    thresh:
        threshold value used in offset estimation (enter - for default: 0.1)
    rstep:
        range step used in offset estimation (enter - for default: rwin/4)
    azstep:
        azimuth step used in offset estimation (enter - for default: azwin/4)
    cleaning:
        cleaning flag (enter - for default)
        * 0: no cleaning, keep intermediate files
        * 1: delete intermediate files (default)
    use_existing:
        use files generated in a previous run to speed up processing (enter - for default)
        * 0: no (default)
        * 1: yes
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # assemble the full command line and hand it to the shared process helper
    args = [SLC, par, rwin, azwin, thresh, rstep, azstep, cleaning, use_existing]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/ionosphere_check'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def make_tab(list, tab, template, logpath=None, outdir=None, shellscript=None):
    """
    | GAMMA_SOFTWARE-20250625/ISP/scripts/make_tab
    | Generate a table file from a list or multi-column table using a text template
    | Copyright 2024, Gamma Remote Sensing, v1.1 22-Apr-2024 cm/clw/uw
    Parameters
    ----------
    list:
        (input) list or multi-column table (text)
    tab:
        (output) table file (text)
    template:
        template definition used to generate a line of the output table, entered between single quotes.
        Placeholders $1, $2, ... specify the columns of the input table.
        (example 1: '$1.slc $1.slc.par')
        (example 2: '$1_$2.base $1_$2.off')
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE: parameter name 'list' shadows the builtin; kept for interface compatibility
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/make_tab', list, tab, template],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def mk_ptarg(RSLC_tab, cal_dir, r_samp, az_samp, osf='-', options='-', logpath=None, outdir=None, shellscript=None):
    """
    | GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg
    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw
    | Perform point target analysis on a stack of coregistered SLCs
    Parameters
    ----------
    RSLC_tab:
        (input) two column list of coregistered SLC filenames and SLC parameter filenames (including paths) (ascii)
        1. SLC filename (includes path)
        2. SLC parameter filename (includes path)
    cal_dir:
        directory for output calibration results
    r_samp:
        (input) calibration target range sample number
    az_samp:
        (input) calibration target azimuth line number
    osf:
        SLC over-sampling factor 2, 4, 8, 16, 32, 64 (enter - for default: 16)
        -s scale  (option) set image display scale factor (default: 0.3)
        -e exp    (option) set image display exponent (default: 0.5)
    options:
        not documented
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [RSLC_tab, cal_dir, r_samp, az_samp, osf, options]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def mk_ptarg_cal(CR_tab, SLC, SLC_par, cal_dir, sigma, c_rpos, c_azpos, osf='-', options='-', logpath=None, outdir=None, shellscript=None):
    """
    | GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg_cal
    | Copyright 2023, Gamma Remote Sensing, v2.1 18-Apr-2023 clw
    | Perform point target analysis and calibration factor evaluation for a set of point targets
    Parameters
    ----------
    CR_tab:
        (input) 3 column list of row and sample number of corner reflectors
        1. Corner reflector id
        2. SLC column (includes path)
        3. SLC row (includes path)
    SLC:
        SLC image
    SLC_par:
        SLC_parameter file
    cal_dir:
        directory for output calibration results
    sigma:
        Radar cross-section of the corner reflectors
    c_rpos:
        range sample number of the center of the region used to estimate region
    c_azpos:
        azimuth line of the center of the region used to estimate clutter
    osf:
        SLC over-sampling factor 2, 4, 8, 16, 32, 64 (enter - for default: 16)
        -s scale  (option) set image display scale factor (default: 0.2)
        -e exp    (option) set image display exponent (default: 0.5)
    options:
        not documented
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [CR_tab, SLC, SLC_par, cal_dir, sigma, c_rpos, c_azpos, osf, options]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg_cal'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def mk_tab3(dir, ext1, ext2, ext3, tab, logpath=None, outdir=None, shellscript=None):
    """
    | Copyright 2023, Gamma Remote Sensing, v1.1 24-Apr-2023 clw
    | Generate SLC_tab, MLI_tab, or RAW_list for processing
    Parameters
    ----------
    dir:
        (input) directory including paths that contain the data files
    ext1:
        (input) pattern to select data files (examples: slc, raw...), (enter - for all files in the directory)
    ext2:
        (input) pattern to select parameter files that match the data (enter - for none, examples: slc.par, raw_par, raw.par)
    ext3:
        (input) pattern to select parameter files that match the data (enter - for none, examples: ppar)
    tab:
        (output) list of data filenames and associated parameter files (including paths) (text)
        * NOTE: The current directory is denoted using .
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE: parameter name 'dir' shadows the builtin; kept for interface compatibility
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/mk_tab3', dir, ext1, ext2, ext3, tab],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_plot_az(offset, r_min, r_max, r_plot, az_plot, logpath=None, outdir=None, shellscript=None):
    """
    | IPTA script: GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_az
    | Copyright 2023, Gamma Remote Sensing, v1.4 17-Apr-2023 clw
    | extract range and azimuth offsets for a range window from a text offset file
    Parameters
    ----------
    offset:
        (input) list of range and azimuth offsets generated by offset_pwr (text)
    r_min:
        minimum range pixel number to extract range and azimuth offsets
    r_max:
        maximum range pixel number to extract range and azimuth offsets
    r_plot:
        range offsets xmgrace plot file
    az_plot:
        azimuth offsets xmgrace plot file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [offset, r_min, r_max, r_plot, az_plot]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_az'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def offset_plot_r(offset, az_min, az_max, r_plot, az_plot, logpath=None, outdir=None, shellscript=None):
    """
    | IPTA script: GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_r
    | Copyright 2004, Gamma Remote Sensing, v1.3 17-Jan-2005 clw
    | extract range and azimuth offsets for an azimuth window from a text offset file
    Parameters
    ----------
    offset:
        (input) list of range and azimuth offsets generated by offset_pwr (text)
    az_min:
        minimum azimuth line number to extract range and azimuth offsets
    az_max:
        maximum azimuth line number to extract range and azimuth offsets
    r_plot:
        range offsets xmgrace plot file
    az_plot:
        azimuth offsets xmgrace plot file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [offset, az_min, az_max, r_plot, az_plot]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_r'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def OPOD_vec(SLC_par, OPOD_dir, nstate='-', logpath=None, outdir=None, shellscript=None):
    """
    | GAMMA_SOFTWARE-20250625/ISP/scripts/OPOD_vec
    | Copyright 2025, Gamma Remote Sensing, v1.7 4-Feb-2025 clw/awi/cm
    | Extract Sentinel-1 state vectors from an OPOD file and write these state vectors to an SLC parameter file
    Parameters
    ----------
    SLC_par:
        (input/output) ISP SLC/MLI image parameter file
    OPOD_dir:
        (input) directory containing Sentinel-1 precise or restituted OPOD orbit data files (AUX_POEORB or AUX_RESORB)
        orbit files can be downloaded from https://s1qc.asf.alaska.edu/ or https://dataspace.copernicus.eu/
    nstate:
        number of state vectors to extract (enter - for default: include 60 sec extension at the start and end of the SLC data)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # delegate execution to the shared process helper
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/OPOD_vec', SLC_par, OPOD_dir, nstate],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def run_all(list, command, log='-', logpath=None, outdir=None, shellscript=None):
    """
    | GAMMA_SOFTWARE-20250625/ISP/scripts/run_all
    | Run a single command iterating over arguments constructed from the elements of a list or multi-column table
    | Copyright 2025, Gamma Remote Sensing, v1.7 11-Mar-2025 clw/cm
    Parameters
    ----------
    list:
        (input) list or multi-column table (text)
    command:
        command template, entered between single quotes. Command arguments are constructed
        with placeholders $1, $2, ... that specify the columns of the input table.
        (example 1: 'multi_look $1.slc $1.slc.par $1.mli $1.mli.par 5 1')
        (example 2: 'cp -r $1 $2')
    log:
        (output) log file that captures all screen output (both stdout and stderr) (enter - for none)
        Example: run_all dates 'multi_look $1.slc $1.slc.par $1.mli $1.mli.par 5 1' log
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # NOTE: parameter name 'list' shadows the builtin; kept for interface compatibility
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/run_all', list, command, log],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_BURST_tab(SLC1_tab, SLC2_tab, BURST_tab, logpath=None, outdir=None, shellscript=None):
    """
    | GAMMA_SOFTWARE-20250625/ISP/scripts/S1_BURST_tab
    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 clw/cm
    | Calculate Sentinel BURST_tab based on parameters extracted from SLC parameter files listed in SLC1_tab and SLC2_tab
    | Running SLC_copy_ScanSAR using BURST_tab will generate SLC2 data with matching bursts for each swath of SLC1 and SLC2
    Parameters
    ----------
    SLC1_tab:
        (input) 3 column list of the reference TOPS SLC swaths in row order IW1, IW2, IW3
    SLC2_tab:
        (input) 3 column list of TOPS SLC2 swaths to be resampled to the geometry of the reference SLC1 in row order IW1, IW2, IW3.
    BURST_tab:
        (output) 2 column list of the first and last bursts to copy from each swath, one line for each swath
        BURST_tab line entries: first_burst last_burst  Note: first burst is 1
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # delegate execution to the shared process helper
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_BURST_tab', SLC1_tab, SLC2_tab, BURST_tab],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_BURST_tab_from_zipfile(zipfile_list, zipfile_ref, burst_number_table_ref='-', cleaning='-', logpath=None, outdir=None, shellscript=None):
    """
    | S1_BURST_tab_from_zipfile: Script used to generate S1_BURST_tab to support burst selection
    | Copyright 2021 Gamma Remote Sensing, v1.8 26-Jan-2021 uw/cm
    |
    | NOTE: S1_BURST_tab_from_zipfile now calls S1_BURST_tab_from_zipfile.py
    | Using directly S1_BURST_tab_from_zipfile.py gives access to
    | additional useful options and is therefore recommended.
    |
    Parameters
    ----------
    zipfile_list:
        (input) ASCII file containing S1 zip filename(s) of one data take
        indicate - to generate burst_number_table of reference TOPS SLC
    zipfile_ref:
        (input) S1 zip filename for the reference TOPS SLC
    burst_number_table_ref:
        (input) ASCII file containing first/last burst numbers selected
        indicate - to use all bursts as present in the reference TOPS SLC zipfile
    cleaning:
        flag to indicate if intermediate files are deleted (default=1: yes, 0: not deleted)
        intermediate and output filenames are generated based on the zip file names
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [zipfile_list, zipfile_ref, burst_number_table_ref, cleaning]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_BURST_tab_from_zipfile'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_extract_png(zipfile, logpath=None, outdir=None, shellscript=None):
    """
    | S1_extract_png: Script used to extract (and rename) quicklook (png file) from a S1 ZIP file
    | Copyright 2019 Gamma Remote Sensing, v1.1 22-Mar-2019 uw/cm
    Parameters
    ----------
    zipfile:
        (input) Sentinel-1 zipfile (GRD or SLC)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # single positional argument: the S1 ZIP file to extract the quicklook from
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_extract_png', zipfile],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_GRD_preproc(S1_list, MLI_dir, pol, log, options='-', logpath=None, outdir=None, shellscript=None):
    """
    | Preprocessing of Sentinel-1 TOPS GRD products, extract GRD data and generate MLI products
    | Copyright 2023, Gamma Remote Sensing, v1.3 18-Apr-2023 clw/cm
    Parameters
    ----------
    S1_list:
        (input) single column text file. Entries are directories (including path) containing Sentinel-1 TOPS GRD products
    MLI_dir:
        directory for output SLC data files and SLC parameter files
        * NOTE: output file names have the form : 20150119_hh.mli
    pol:
        SLC polarization to extract (hh,hv,vh,vv)
    log:
        (output) S1 GRD pre-processing log file
        -c    (option) apply radiometric calibration factor without noise subtraction
        -n    (option) apply radiometric calibration factor with noise subtraction
        -t    (option) include full timestamp YYYYMMDDtHHMMSS in SLC and SLC_par filenames, default YYYYMMDD
    options:
        not documented
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [S1_list, MLI_dir, pol, log, options]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_GRD_preproc'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_path_number(S1_zipfile, logpath=None, outdir=None, shellscript=None):
    """
    | S1_path_number: Script to determine S1 path (or track) number
    | Copyright 2025 Gamma Remote Sensing, v1.3 3-Feb-2025 uw/cm/oc
    Parameters
    ----------
    S1_zipfile:
        (input) S1 zip filename for the TOPS SLC
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # single positional argument: the S1 ZIP file to read
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_path_number', S1_zipfile],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def S1_TOPS_preproc(S1_list, SLC_dir, pol, log, options='-', logpath=None, outdir=None, shellscript=None):
    """
    | Preprocessing of Sentinel-1 TOPS SLC products, extract SLC data and generate SLC_tab
    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/awi/cm
    Parameters
    ----------
    S1_list:
        (input) single column text file. Entries are directories (including path) containing Sentinel-1 TOPS SLC products
    SLC_dir:
        directory for output SLC data files and SLC parameter files
        Note: output file names have the form : 20150119_iw1_hh.slc
    pol:
        SLC polarization to extract (hh,hv,vh,vv)
    log:
        (output) S1 SLC pre-processing log file
        -c            (option) apply radiometric calibration factor without noise subtraction
        -n            (option) apply radiometric calibration factor with noise subtraction
        -s            (option) output is SCOMPLEX format (default: FCOMPLEX)
        -t            (option) include full timestamp YYYYMMDDtHHMMSS in SLC and SLC_par filenames, default YYYYMMDD
        -m MLI_dir    (option) calculate MLI images and store in MLI_dir, enter . for current directory
        -r rlks       (option) number of MLI range looks (default: 10)
        -a azlks      (option) number of MLI azimuth looks (default: 2)
        -b SLC_tab    (option) SLC_tab filename, by default SLC_tab_YYMMDD or SLC_tab_YYYYMMDDtHHMMSS
    options:
        not documented
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [S1_list, SLC_dir, pol, log, options]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_TOPS_preproc'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SBI_INT(RSLC1, RSLC1_par, RSLC2, RSLC2_par, sbi, off, sbi_pwr, par_out, norm_sq='-', rlks='-', azlks='-', iwflg='-', cflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | SBI_INT: Script to generate azimuth Split-Beam Interferogram from a coregistered interferometric SLC pair
    | Copyright 2023 Gamma Remote Sensing, v1.4 19-Apr-2023 uw/clw/cm
    Parameters
    ----------
    RSLC1:
        (input) master single-look complex image (FCOMPLEX or SCOMPLEX)
    RSLC1_par:
        (input) SLC ISP image parameter file of RSLC1
    RSLC2:
        (input) co-registered slave SLC image (FCOMPLEX or SCOMPLEX)
    RSLC2_par:
        (input) SLC ISP image parameter file of RSLC2
    sbi:
        (output) multi-look split-beam interferogram (FCOMPLEX)
    off:
        (output) ISP offset parameter file for multi-look split-beam interferogram (ascii)
    sbi_pwr:
        (output) multi-look reference backscatter intensity image (FLOAT)
    par_out:
        (output) SLC/MLI ISP image parameter file of sbi_pwr
    norm_sq:
        normalized squint difference parameter (enter - for default: 0.5)
    rlks:
        number of range looks in output split-beam interferogram (enter - for default: 1)
    azlks:
        number of azimuth looks in output split-beam interferogram (enter - for default: 1)
    iwflg:
        inverse weighting flag (enter - for default)
        * 0: do not remove azimuth processing spectral window (default)
        * 1: apply inverse of azimuth compression processing window
    cflg:
        flag to indicate if intermediate data (e.g. filtered slc) are deleted (enter - for default)
        * 0: intermediate data are deleted (default)
        * 1: intermediate data are NOT deleted
        file names for band-pass filtered SLC are generated automatically
        by adding the letter b / f for the backward / forward looking beam
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [RSLC1, RSLC1_par, RSLC2, RSLC2_par, sbi, off, sbi_pwr, par_out,
            norm_sq, rlks, azlks, iwflg, cflg]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/SBI_INT'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_burst_cc_ad(DIFF_tab, MLI1_tab, MLI2R_tab, slope_tab, texture_tab, CC_tab, log, box_min='-', box_max='-', wgt_flag='-', logpath=None, outdir=None, shellscript=None):
    """
    | Estimate interferometric coherence for ScanSAR burst data using cc_ad
    | Copyright 2023, Gamma Remote Sensing, v1.2 17-Apr-2023 cm
    Parameters
    ----------
    DIFF_tab:
        (input) 3 column list of the DIFF swaths listed in order from near to far range
        DIFF_tab line entries: DIFF MLI_par TOPS_par
    MLI1_tab:
        (input) 3 column list of the reference ScanSAR MLI swaths listed in order from near to far range (enter - for none)
        MLI1_tab line entries: MLI MLI_par TOPS_par
    MLI2R_tab:
        (input) 3 column list of ScanSAR MLI swaths listed in order from near to far range, coregistered with MLI1 (enter - for none)
        MLI2R_tab line entries: MLI MLI_par TOPS_par
    slope_tab:
        (input) 1 column list of ScanSAR phase slope swaths listed in order from near to far range (enter - for none)
    texture_tab:
        (input) 1 column list of ScanSAR backscatter texture swaths listed in order from near to far range (enter - for none)
    CC_tab:
        (input/output) 3 column list of the CC swaths listed in order from near to far range
        CC_tab line entries: CC MLI_par TOPS_par
        * NOTE: if CC_tab does not exist, it will be created in the current directory.
        The binary file will be named from the differential interferogram name, with the addition of a ".cc" extension.
        The MLI_par and TOPS_par files are copied from MLI1_tab if available, from DIFF_tab otherwise.
    log:
        (output) processing log file
    box_min:
        smallest correlation average box size (enter - for default: 3.0)
    box_max:
        largest correlation average box size (enter - for default: 9.0)
    wgt_flag:
        weighting function (enter - for default)
        * 0: constant (default)
        * 1: gaussian
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [DIFF_tab, MLI1_tab, MLI2R_tab, slope_tab, texture_tab, CC_tab, log,
            box_min, box_max, wgt_flag]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/ScanSAR_burst_cc_ad'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def ScanSAR_burst_cc_wave(DIFF_tab, MLI1_tab, MLI2R_tab, CC_tab, log, bx='-', by='-', wflg='-', logpath=None, outdir=None, shellscript=None):
    """
    | Estimate interferometric coherence for ScanSAR burst data using cc_wave
    | Copyright 2019, Gamma Remote Sensing, v1.2 24-Apr-2019 cm
    Parameters
    ----------
    DIFF_tab:
        (input) 3 column list of the DIFF swaths listed in order from near to far range
        DIFF_tab line entries: DIFF MLI_par TOPS_par
    MLI1_tab:
        (input) 3 column list of the reference ScanSAR MLI swaths listed in order from near to far range (enter - for none)
        MLI1_tab line entries: MLI MLI_par TOPS_par
    MLI2R_tab:
        (input) 3 column list of ScanSAR MLI swaths listed in order from near to far range, coregistered with MLI1 (enter - for none)
        MLI2R_tab line entries: MLI MLI_par TOPS_par
    CC_tab:
        (input/output) 3 column list of the CC swaths listed in order from near to far range
        CC_tab line entries: CC MLI_par TOPS_par
        * NOTE: if CC_tab does not exist, it will be created in the current directory.
        The binary file will be named from the differential interferogram name, with the addition of a ".cc" extension.
        The MLI_par and TOPS_par files are copied from MLI1_tab if available, from DIFF_tab otherwise.
    log:
        (output) processing log file
    bx:
        estimation window size in columns (enter - for default: 5.0)
    by:
        estimation window size in lines (enter - for default: 5.0)
    wflg:
        estimation window (enter - for default):
        * 0: rectangular (default)
        * 1: triangular
        * 2: Gaussian
        * 3: normalized vector sum with rectangular window
        * NOTE: This estimator does not use the MLI data, even when specified
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [DIFF_tab, MLI1_tab, MLI2R_tab, CC_tab, log, bx, by, wflg]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/ScanSAR_burst_cc_wave'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def SLC_copy_WB(SLC_tab, SLC2_dir, logpath=None, outdir=None, shellscript=None):
    """
    | GAMMA_SOFTWARE-20250625/ISP/scripts/SLC_copy_WB
    | Copyright 2011, Gamma Remote Sensing, v1.1 9-Apr-2011 clw
    | Create a new set of SLCs for all beams in a PALSAR WB ScanSAR image
    Parameters
    ----------
    SLC_tab:
        (input) two column list of input SLC files and SLC ISP image parameter files (including paths) (text)
    SLC2_dir:
        directory to contain copied segments of the input SLC data and the associated parameter files
        * NOTE: current directory is denoted using .
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # delegate execution to the shared process helper
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/SLC_copy_WB', SLC_tab, SLC2_dir],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def TX_SLC_preproc(TSX_list, SLC_dir, log, logpath=None, outdir=None, shellscript=None):
    """
    | Preprocessing of TerraSAR-X TDX1 and TSX1 SLC products using par_TX_SLC
    | Copyright 2023, Gamma Remote Sensing, v1.3 17-Apr-2023 clw
    Parameters
    ----------
    TSX_list:
        (input) single column text file with directories (including path)
        containing path to directory containing product XML for IMAGEDATA/\\*.cos files
    SLC_dir:
        directory for output SLC data files and SLC parameter files
    log:
        (output) processing log file
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # delegate execution to the shared process helper
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/TX_SLC_preproc', TSX_list, SLC_dir, log],
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def unw_correction_filt(unw_in, unw_out, width, fsize='-', thresh1='-', thresh2='-', iterations='-', cleaning='-', logpath=None, outdir=None, shellscript=None):
    """
    | unw_correction_filt: Phase unwrapping ambiguity error correction relative to spatially filtered phase
    | Copyright 2023 Gamma Remote Sensing, v1.5 18-Apr-2023 uw/cm
    Parameters
    ----------
    unw_in:
        (input) unwrapped phase file to correct (float)
    unw_out:
        (output) corrected unwrapped phase file (float)
    width:
        number of range samples per line
    fsize:
        maximum filter radius in pixels (enter - for default: 5)
    thresh1:
        upper threshold for negative phase differences (enter - for default: -3.0)
    thresh2:
        lower threshold for positive phase differences (enter - for default: 3.0)
    iterations:
        number of iterations to run (enter - for default: 1)
    cleaning:
        cleaning flag indicating if intermediary files are deleted (enter - for default)
        * 0: no
        * 1: yes (default)
        The difference between the unfiltered and spatially filtered phase (using fspf) is used
        to determine and correct phase unwrapping ambiguity errors
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    # positional arguments in Gamma script order
    args = [unw_in, unw_out, width, fsize, thresh1, thresh2, iterations, cleaning]
    process(['GAMMA_SOFTWARE-20250625/ISP/scripts/unw_correction_filt'] + args,
            logpath=logpath, outdir=outdir, shellscript=shellscript)
def unw_correction_poly(unw_in, unw_out, width, poly, flag, max_iter='-', logpath=None, outdir=None, shellscript=None):
    """
    | unw_correction_poly: Phase unwrapping ambiguity error correction for polygon areas
    | Copyright 2023 Gamma Remote Sensing, v1.5 18-Apr-2023 uw/cm

    Parameters
    ----------
    unw_in:
        (input) unwrapped phase file to correct (FLOAT)
    unw_out:
        (output) corrected unwrapped phase file (FLOAT)
    width:
        number of range samples per line
    poly:
        (input) polygon file (text)
    flag:
        ambiguity correction flag (1: add 2PI; -1: subtract 2PI)
    max_iter:
        maximum number of iterations done (enter - for default: 1)
        (iterations are used (a) if the ambiguity to correct is not 2PI but a
        multiple of 2PI and (b) if the ambiguity error is in an area with a
        significant phase slope)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    script = 'GAMMA_SOFTWARE-20250625/ISP/scripts/unw_correction_poly'
    args = [script, unw_in, unw_out, width, poly, flag, max_iter]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def UNWRAP(interf, cc, pwr, unwrap, flag, width, lines, corr_thr='-', pwr_thr='-', r_init='-', az_init='-', r1='-', r2='-', l1='-', l2='-', logpath=None, outdir=None, shellscript=None):
    """
    | UNWRAP: unwrap phase
    | Copyright 2023 Gamma Remote Sensing, v1.4 19-Apr-2023 clw/cm

    Parameters
    ----------
    interf:
        interferogram filename (\\*.int, \\*.flt)
    cc:
        correlation filename (\\*.cc)
    pwr:
        intensity image (\\*.pwr, \\*.mli)
    unwrap:
        unwrap output file (\\*.unw)
    flag:
        unwapping flag file (\\*.flag)
    width:
        interferogram width
    lines:
        number of interferogram lines
    corr_thr:
        threshold for correlation in the unwrapping mask (enter - for default: 0.7)
    pwr_thr:
        intensity threshold for phase unwrapping neutrons, multiples of average (enter - for default: 6.0)
    r_init:
        range seed location in the interferogram (enter - for default: width/2)
    az_init:
        azimuth seed location in the interferogram (enter - for default: nlines/2)
    r1:
        starting range sample offset to unwrap (enter - for default: 0)
    r2:
        ending range sample offset to unwrap (enter - for default: width-1)
    l1:
        starting line offset to unwrap (enter - for default: 0)
    l2:
        ending line offset to unwrap (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    script = 'GAMMA_SOFTWARE-20250625/ISP/scripts/UNWRAP'
    args = [script, interf, cc, pwr, unwrap, flag, width, lines,
            corr_thr, pwr_thr, r_init, az_init, r1, r2, l1, l2]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
def UNWRAP_PAR(interf_par, interf, cc, pwr, unwrap, flag, corr_thr='-', pwr_thr='-', r_init='-', az_init='-', r1='-', r2='-', l1='-', l2='-', logpath=None, outdir=None, shellscript=None):
    """
    | UNWRAP_PAR: unwrap phase using parameters from the ISP interferogram parameter file
    | Copyright 2023 Gamma Remote Sensing, v1.3 19-Apr-2023 clw/cm

    Parameters
    ----------
    interf_par:
        interferogram parameter file \\*.off
    interf:
        interferogram filename (\\*.int, \\*.flt)
    cc:
        correlation filename (\\*.cc)
    pwr:
        intensity image (\\*.pwr, \\*.mli)
    unwrap:
        unwrap output file (\\*.unw)
    flag:
        unwapping flag file (\\*.flag)
    corr_thr:
        threshold for correlation in the unwrapping mask (enter - for default: 0.7)
    pwr_thr:
        intensity threshold for phase unwrapping neutrons, multiples of average (enter - for default: 6.0)
    r_init:
        range seed location in the interferogram (enter - for default: width/2)
    az_init:
        azimuth seed location in the interferogram (enter - for default: nlines/2)
    r1:
        starting range sample offset to unwrap (enter - for default: 0)
    r2:
        ending range sample offset to unwrap (enter - for default: width-1)
    l1:
        starting line offset to unwrap (enter - for default: 0)
    l2:
        ending line offset to unwrap (enter - for default: nlines-1)
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    """
    script = 'GAMMA_SOFTWARE-20250625/ISP/scripts/UNWRAP_PAR'
    args = [script, interf_par, interf, cc, pwr, unwrap, flag,
            corr_thr, pwr_thr, r_init, az_init, r1, r2, l1, l2]
    process(args, logpath=logpath, outdir=outdir, shellscript=shellscript)
================================================
FILE: pyroSAR/gamma/util.py
================================================
###############################################################################
# universal core routines for processing SAR images with GAMMA
# Copyright (c) 2014-2026, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
"""
This module is intended as a set of generalized processing routines for modularized GAMMA workflows.
The function parametrization is intended to be applicable to any kind of situation and input data set.
Thus, instead of choosing a specific parametrization for the data at hand,
core parameters are iterated over a set of values in order to find the one best suited for the task.
The approach of the single routines is likely to still have drawbacks and might fail in certain situations.
Testing and suggestions on improvements are very welcome.
"""
import os
import re
import shutil
import zipfile as zf
from datetime import datetime
from urllib.error import URLError
import numpy as np
from spatialist import haversine, Raster
from spatialist.ancillary import union, finder
from ..S1 import OSV
from ..drivers import ID, identify_many
from . import ISPPar, Namespace, par2hdr
from ..ancillary import multilook_factors, hasarg, groupby, Lock
from pyroSAR.examine import ExamineSnap, ExamineGamma
from .auxil import do_execute
import logging
log = logging.getLogger(__name__)
try:
    # the GAMMA API wrapper modules can only be imported if a GAMMA
    # installation is available; the ImportError is deliberately swallowed
    # so that this module remains importable without GAMMA — functions
    # using isp/diff/disp/lat will then fail with NameError when called
    from .api import diff, disp, isp, lat
except ImportError:
    pass
def calibrate(id, directory, return_fnames=False,
              logpath=None, outdir=None, shellscript=None):
    """
    radiometric calibration of SAR scenes

    Parameters
    ----------
    id: ~pyroSAR.drivers.ID
        an SAR scene object of type pyroSAR.ID or any subclass
    directory: str
        the directory to search for GAMMA calibration candidates
    return_fnames: bool
        return the names of the output image files? Default: False.
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the GAMMA commands to in shell format

    Returns
    -------
    List[str] or None
    """
    cname = type(id).__name__
    outputs = []
    if cname == 'CEOS_PSR':
        # calibrate PALSAR SLC images to their '_cal' counterparts
        for img in id.getGammaImages(directory):
            if not img.endswith('_slc'):
                continue
            isp.radcal_SLC(SLC=img,
                           SLC_par=f'{img}.par',
                           CSLC=f'{img}_cal',
                           CSLC_par=f'{img}_cal.par',
                           K_dB=id.meta['k_dB'],
                           logpath=logpath,
                           outdir=outdir,
                           shellscript=shellscript)
            par2hdr(f'{img}_cal.par', f'{img}_cal.hdr')
            outputs.append(f'{img}_cal')
    elif cname == 'EORC_PSR':
        # calibrate PALSAR MLI images; radcal_MLI does not write a new
        # parameter file, so the existing one is renamed afterwards
        for img in id.getGammaImages(directory):
            if not img.endswith('_mli'):
                continue
            isp.radcal_MLI(MLI=img,
                           MLI_par=f'{img}.par',
                           OFF_par='-',
                           CMLI=f'{img}_cal',
                           antenna='-',
                           rloss_flag=0,
                           ant_flag=0,
                           refarea_flag=1,
                           sc_dB=0,
                           K_dB=id.meta['k_dB'],
                           pix_area=f'{img}_cal_pix_ell',
                           logpath=logpath,
                           outdir=outdir,
                           shellscript=shellscript)
            par2hdr(f'{img}.par', f'{img}_cal.hdr')
            par2hdr(f'{img}.par', f'{img}_cal_pix_ell.hdr')
            # rename parameter file
            os.rename(f'{img}.par', f'{img}_cal.par')
            outputs.append(f'{img}_cal')
    elif cname == 'ESA':
        # sensor-specific calibration constants and reference incidence angle
        k_db = {'ASAR': 55., 'ERS1': 58.24, 'ERS2': 59.75}[id.sensor]
        inc_ref = 90. if id.sensor == 'ASAR' else 23.
        for img in id.getGammaImages(directory):
            if re.search('_pri$', img) is None:
                continue
            out = img.replace('pri', 'grd')
            isp.radcal_PRI(PRI=img,
                           PRI_par=f'{img}.par',
                           GRD=out,
                           GRD_par=f'{out}.par',
                           K_dB=k_db,
                           inc_ref=inc_ref,
                           logpath=logpath,
                           outdir=outdir,
                           shellscript=shellscript)
            par2hdr(f'{out}.par', f'{out}.hdr')
            outputs.append(out)
    elif cname == 'SAFE':
        log.info('calibration already performed during import')
    else:
        raise NotImplementedError(f'calibration for class {cname} is not implemented yet')
    if return_fnames and outputs:
        return outputs
def convert2gamma(id, directory, S1_tnr=True, S1_bnr=True,
                  basename_extensions=None, exist_ok=False,
                  return_fnames=False,
                  logpath=None, outdir=None, shellscript=None):
    """
    general function for converting SAR images to GAMMA format

    Parameters
    ----------
    id: ~pyroSAR.drivers.ID
        an SAR scene object of type pyroSAR.ID or any subclass
    directory: str
        the output directory for the converted images
    S1_tnr: bool
        only Sentinel-1: should thermal noise removal be applied to the image?
    S1_bnr: bool
        only Sentinel-1 GRD: should border noise removal be applied to the image?
        This is available since version 20191203, for older versions this argument is ignored.
    basename_extensions: list[str] or None
        names of additional parameters to append to the basename, e.g. ['orbitNumber_rel']
    exist_ok: bool
        allow existing output files and do not create new ones?
    return_fnames: bool
        return the names of the output image files? Default: False.
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the GAMMA commands to in bash format

    Returns
    -------
    list[str] or None
        the sorted image file names if ``return_fnames=True`` and None otherwise
    """
    if not isinstance(id, ID):
        raise IOError('id must be of type pyroSAR.ID')
    if id.compression is not None:
        raise RuntimeError('scene is not yet unpacked')
    os.makedirs(directory, exist_ok=True)
    fnames = []
    cname = type(id).__name__
    if cname == 'CEOS_ERS':
        if id.sensor in ['ERS1', 'ERS2']:
            if id.product == 'SLC' \
                    and id.meta['proc_system'] in ['PGS-ERS', 'VMP-ERS', 'SPF-ERS']:
                outname_base = id.outname_base(extensions=basename_extensions)
                outname_base = '{}_{}_{}'.format(outname_base,
                                                 id.polarizations[0],
                                                 id.product.lower())
                outname = os.path.join(directory, outname_base)
                if not os.path.isfile(outname):
                    lea = id.findfiles('LEA_01.001')[0]
                    dat = id.findfiles('DAT_01.001')[0]
                    title = re.sub(r'\.PS$', '', os.path.basename(id.file))
                    pars = {'CEOS_SAR_leader': lea,
                            'SLC_par': outname + '.par',
                            'CEOS_DAT': dat,
                            'SLC': outname,
                            'inlist': [title],
                            'logpath': logpath,
                            'outdir': outdir,
                            'shellscript': shellscript}
                    # the lock prevents parallel processes from writing the same output
                    with Lock(outname):
                        if do_execute(pars, ['SLC', 'SLC_par'], exist_ok):
                            isp.par_ESA_ERS(**pars)
                            par2hdr(outname + '.par', outname + '.hdr')
                    fnames.append(outname)
                else:
                    log.info('scene already converted')
            else:
                raise NotImplementedError('ERS {} product of {} processor in CEOS format not implemented yet'
                                          .format(id.product, id.meta['proc_system']))
        else:
            raise NotImplementedError('sensor {} in CEOS format not implemented yet'.format(id.sensor))
    elif cname == 'CEOS_PSR':
        images = id.findfiles('^IMG-')
        if id.product == '1.0':
            raise RuntimeError('PALSAR level 1.0 products are not supported')
        for image in images:
            polarization = re.search('[HV]{2}', os.path.basename(image)).group(0)
            outname_base = id.outname_base(extensions=basename_extensions)
            pars = {'CEOS_leader': id.file,
                    'CEOS_data': image,
                    'logpath': logpath,
                    'outdir': outdir,
                    'shellscript': shellscript}
            if id.product == '1.1':
                outname_base = '{}_{}_slc'.format(outname_base, polarization)
                outname = os.path.join(directory, outname_base)
                pars['SLC'] = outname
                pars['SLC_par'] = outname + '.par'
                with Lock(outname):
                    if do_execute(pars, ['SLC', 'SLC_par'], exist_ok):
                        isp.par_EORC_PALSAR(**pars)
                        par2hdr(outname + '.par', outname + '.hdr')
            else:
                # higher-level products are already geocoded
                outname_base = '{}_{}_mli_geo'.format(outname_base, polarization)
                outname = os.path.join(directory, outname_base)
                pars['MLI'] = outname
                pars['MLI_par'] = outname + '.par'
                pars['DEM_par'] = outname + '_dem.par'
                with Lock(outname):
                    if do_execute(pars, ['MLI', 'MLI_par', 'DEM_par'], exist_ok):
                        diff.par_EORC_PALSAR_geo(**pars)
                        par2hdr(outname + '.par', outname + '.hdr')
            fnames.append(outname)
    elif cname == 'EORC_PSR':
        images = id.findfiles('^sar.')
        # NOTE(review): findfiles returns a list; confirm par_KC_PALSAR_slr
        # accepts lists for facter_m/CEOS_leader or whether the first
        # element is intended here
        facter_m = id.findfiles('facter_m.dat')
        led = id.findfiles('LED-ALOS2')
        for image in images:
            polarization = re.search('[HV]{2}', os.path.basename(image)).group(0)
            outname_base = id.outname_base(extensions=basename_extensions)
            outname_base = '{}_{}'.format(outname_base, polarization)
            outname = os.path.join(directory, outname_base) + '_mli'
            fnames.append(outname)
            pars = {'facter_m': facter_m,
                    'CEOS_leader': led,
                    'SLC_par': outname + '.par',
                    'pol': polarization,
                    'pls_mode': 2,
                    'KC_data': image,
                    'pwr': outname,
                    'logpath': logpath,
                    'outdir': outdir,
                    'shellscript': shellscript}
            with Lock(outname):
                if do_execute(pars, ['pwr', 'SLC_par'], exist_ok):
                    isp.par_KC_PALSAR_slr(**pars)
                    par2hdr(outname + '.par', outname + '.hdr')
    elif cname == 'ESA':
        """
        the command par_ASAR also accepts a K_dB argument for calibration
        in which case the resulting image names will carry the suffix grd;
        this is not implemented here but instead in function calibrate
        """
        outname = os.path.join(directory, id.outname_base(extensions=basename_extensions))
        with Lock(outname):
            isp.par_ASAR(ASAR_ERS_file=os.path.basename(id.file),
                         output_name=outname,
                         outdir=os.path.dirname(id.file),
                         logpath=logpath,
                         shellscript=shellscript)
            os.remove(outname + '.hdr')
            for item in finder(directory, [os.path.basename(outname)], regex=True):
                ext = '.par' if item.endswith('.par') else ''
                base = os.path.basename(item)
                # remove the literal extension; the former str.strip(ext) call
                # would instead strip any leading/trailing characters contained
                # in the set {'.', 'p', 'a', 'r'} and could corrupt the name
                if ext and base.endswith(ext):
                    base = base[:-len(ext)]
                outname_base = base \
                    .replace('.', '_') \
                    .replace('PRI', 'pri') \
                    .replace('SLC', 'slc')
                outname = os.path.join(directory, outname_base + ext)
                os.rename(item, outname)
                fnames.append(outname)
                if outname.endswith('.par'):
                    par2hdr(outname, outname.replace('.par', '.hdr'))
    elif cname == 'SAFE':
        if id.product == 'OCN':
            raise IOError('Sentinel-1 OCN products are not supported')
        if id.meta['category'] == 'A':
            raise IOError('Sentinel-1 annotation-only products are not supported')
        for xml_ann in finder(os.path.join(id.scene, 'annotation'), [id.pattern_ds], regex=True):
            base = os.path.basename(xml_ann)
            match = re.compile(id.pattern_ds).match(base)
            tiff = os.path.join(id.scene, 'measurement', base.replace('.xml', '.tiff'))
            xml_cal = os.path.join(id.scene, 'annotation', 'calibration', 'calibration-' + base)
            product = match.group('product')
            # In versions released before July 2015, it was assumed that noise was already
            # removed in GRDs and specifying the XML file meant adding it back to the data.
            version = ExamineGamma().version
            if version < '20150701':
                c = (S1_tnr and product == 'slc') or (not S1_tnr and product == 'grd')
            else:
                c = S1_tnr
            if c:
                xml_noise = os.path.join(id.scene, 'annotation', 'calibration', 'noise-' + base)
            else:
                xml_noise = '-'
            fields = (id.outname_base(extensions=basename_extensions),
                      match.group('pol').upper(),
                      product)
            basename = '_'.join(fields)
            outname = os.path.join(directory, basename)
            pars = {'GeoTIFF': tiff,
                    'annotation_XML': xml_ann,
                    'calibration_XML': xml_cal,
                    'noise_XML': xml_noise,
                    'logpath': logpath,
                    'shellscript': shellscript,
                    'outdir': outdir}
            if product == 'slc':
                # replace the acquisition mode by the individual swath identifier
                swath = match.group('swath').upper()
                old = '{:_<{length}}'.format(id.acquisition_mode, length=len(swath))
                base_new = basename.replace(old, swath)
                outname = os.path.join(os.path.dirname(outname), base_new)
                pars['SLC'] = outname
                pars['SLC_par'] = outname + '.par'
                pars['TOPS_par'] = outname + '.tops_par'
                with Lock(outname):
                    if do_execute(pars, ['SLC', 'SLC_par', 'TOPS_par'], exist_ok):
                        isp.par_S1_SLC(**pars)
                        par2hdr(outname + '.par', outname + '.hdr')
            else:
                # border noise removal is only supported via the edge_flag
                # argument of newer par_S1_GRD versions
                if hasarg(isp.par_S1_GRD, 'edge_flag'):
                    if S1_bnr:
                        pars['edge_flag'] = 2
                    else:
                        pars['edge_flag'] = 0
                else:
                    if S1_bnr:
                        raise RuntimeError("The command par_S1_GRD of this GAMMA "
                                           "version does not support border noise "
                                           "removal. You may want to consider "
                                           "pyroSAR's own method for this task.")
                pars['MLI'] = outname
                pars['MLI_par'] = outname + '.par'
                with Lock(outname):
                    if do_execute(pars, ['MLI', 'MLI_par'], exist_ok):
                        isp.par_S1_GRD(**pars)
                        par2hdr(outname + '.par', outname + '.hdr')
            fnames.append(outname)
    elif cname == 'TSX':
        images = id.findfiles(id.pattern_ds)
        pattern = re.compile(id.pattern_ds)
        for image in images:
            pol = pattern.match(os.path.basename(image)).group('pol')
            outname_base = id.outname_base(extensions=basename_extensions)
            outname = os.path.join(directory, outname_base + '_' + pol)
            pars = {'annotation_XML': id.file,
                    'pol': pol,
                    'logpath': logpath,
                    'shellscript': shellscript,
                    'outdir': outdir}
            if id.product == 'SSC':
                outname += '_slc'
                pars['COSAR'] = image
                pars['SLC_par'] = outname + '.par'
                pars['SLC'] = outname
                with Lock(outname):
                    if do_execute(pars, ['SLC', 'SLC_par'], exist_ok):
                        isp.par_TX_SLC(**pars)
                        par2hdr(outname + '.par', outname + '.hdr')
            elif id.product == 'MGD':
                outname += '_mli'
                pars['GeoTIFF'] = image
                pars['GRD_par'] = outname + '.par'
                pars['GRD'] = outname
                with Lock(outname):
                    if do_execute(pars, ['GRD', 'GRD_par'], exist_ok):
                        isp.par_TX_GRD(**pars)
                        par2hdr(outname + '.par', outname + '.hdr')
            elif id.product in ['GEC', 'EEC']:
                outname += '_mli_geo'
                pars['GeoTIFF'] = image
                pars['MLI_par'] = outname + '.par'
                pars['DEM_par'] = outname + '_dem.par'
                pars['GEO'] = outname
                with Lock(outname):
                    if do_execute(pars, ['GEO', 'MLI_par', 'DEM_par'], exist_ok):
                        diff.par_TX_geo(**pars)
                        par2hdr(outname + '.par', outname + '.hdr')
            else:
                raise RuntimeError('unknown product: {}'.format(id.product))
            fnames.append(outname)
    else:
        raise NotImplementedError('conversion for class {} is not implemented yet'.format(cname))
    if return_fnames:
        return sorted(fnames)
def correctOSV(id, directory, osvdir=None, osvType='POE', timeout=20,
               logpath=None, outdir=None, shellscript=None, url_option=1):
    """
    correct GAMMA parameter files with orbit state vector information from dedicated OSV files;
    OSV files are downloaded automatically to either the defined `osvdir` or relative to the
    user's home directory: `~/.snap/auxdata/Orbits/Sentinel-1`.

    Parameters
    ----------
    id: ~pyroSAR.drivers.ID
        the scene to be corrected
    directory: str or None
        a directory to be scanned for files associated with the scene, e.g. an SLC in GAMMA format.
        If the OSV file is packed in a zip file it will be unpacked to a subdirectory `osv`.
    osvdir: str or None
        the directory of the OSV files. Default None: use the SNAP directory
        as configured via `pyroSAR.examine.ExamineSnap` or, if SNAP is not
        installed, `~/.snap/auxdata/Orbits/Sentinel-1` (SNAP default).
        Subdirectories POEORB and RESORB are created automatically.
    osvType: str or list[str]
        the OSV type (POE|RES) to be used
    timeout: int or tuple or None
        the timeout in seconds for downloading OSV files as provided to :func:`requests.get`
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the GAMMA commands to in shell format
    url_option: int
        the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch`

    Returns
    -------
    None

    Examples
    --------
    >>> from pyroSAR import identify
    >>> from pyroSAR.gamma import correctOSV, convert2gamma
    >>> filename = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'
    # identify the SAR scene
    >>> scene = identify(filename)
    # unpack the zipped scene to an arbitrary directory
    >>> scene.unpack('/home/test')
    >>> print(scene.scene)
    /home/test/S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.SAFE
    # convert the unpacked scene to GAMMA format
    >>> convert2gamma(id=scene, directory=scene.scene)
    # correct the OSV information of the converted GAMMA images
    >>> correctOSV(id=scene, osvdir='/home/test/osv')

    See Also
    --------
    :meth:`pyroSAR.drivers.SAFE.getOSV`
    :class:`pyroSAR.S1.OSV`
    """
    if not isinstance(id, ID):
        raise IOError('id must be of type pyroSAR.ID')
    if id.sensor not in ['S1A', 'S1B', 'S1C', 'S1D']:
        raise IOError('this method is currently only available for Sentinel-1. Please stay tuned...')
    # logpath defaults to None; the former unconditional os.path.isdir(logpath)
    # check raised a TypeError in that case
    if logpath is not None:
        os.makedirs(logpath, exist_ok=True)
    if osvdir is None:
        try:
            auxdatapath = ExamineSnap().auxdatapath
        except AttributeError:
            auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')
        osvdir = os.path.join(auxdatapath, 'Orbits', 'Sentinel-1')
    try:
        id.getOSV(osvdir, osvType, timeout=timeout, url_option=url_option)
    except URLError:
        log.warning('..no internet access')
    parfiles = finder(directory, ['*.par'])
    parfiles = [x for x in parfiles if ISPPar(x).filetype == 'isp']
    # fail with a clear message instead of an IndexError below
    if len(parfiles) == 0:
        raise RuntimeError('no ISP parameter files found in directory: {}'.format(directory))
    # read parameter file entries into object
    with ISPPar(parfiles[0]) as par:
        # extract acquisition time stamp
        timestamp = datetime.strptime(par.date, '%Y-%m-%dT%H:%M:%S.%f').strftime('%Y%m%dT%H%M%S')
    # find an OSV file matching the time stamp and defined OSV type(s)
    with OSV(osvdir, timeout=timeout) as osv:
        osvfile = osv.match(sensor=id.sensor, timestamp=timestamp, osvtype=osvType)
    if not osvfile:
        raise RuntimeError('no Orbit State Vector file found')
    if osvfile.endswith('.zip'):
        # unpack the zipped OSV file to a subdirectory 'osv'
        osvdir = os.path.join(directory, 'osv')
        with zf.ZipFile(osvfile) as archive:
            archive.extractall(path=osvdir)
        osvfile = os.path.join(osvdir, os.path.basename(osvfile).replace('.zip', ''))
    # update the GAMMA parameter file with the selected orbit state vectors
    log.debug('correcting state vectors with file {}'.format(osvfile))
    for par in parfiles:
        log.debug(par)
        with Lock(par.replace('.par', '')):
            isp.S1_OPOD_vec(SLC_par=par,
                            OPOD=osvfile,
                            logpath=logpath,
                            outdir=outdir,
                            shellscript=shellscript)
def gc_map_wrap(image, namespace, dem, spacing, exist_ok=False,
                logpath=None, outdir=None, shellscript=None):
    """
    helper function for computing DEM products in function geocode.

    Parameters
    ----------
    image: str
        the reference SAR image
    namespace: pyroSAR.gamma.auxil.Namespace
        an object collecting all output file names
    dem: str
        the digital elevation model
    spacing: int or float
        the target pixel spacing in meters
    exist_ok: bool
        allow existing output files and do not create new ones?
    logpath: str
        a directory to write command logfiles to
    outdir: str
        the directory to execute the command in
    shellscript: str
        a file to write the GAMMA commands to in shell format

    Returns
    -------
    None
    """
    # compute DEM oversampling factors; will be 1 for range and
    # azimuth if the DEM spacing matches the target spacing
    ovs_lat, ovs_lon = ovs(dem + '.par', spacing)
    image_par = ISPPar(image + '.par')
    gc_map_args = {'DEM_par': dem + '.par',
                   'DEM': dem,
                   'DEM_seg_par': namespace.dem_seg_geo + '.par',
                   'DEM_seg': namespace.dem_seg_geo,
                   'lookup_table': namespace.lut_init,
                   'lat_ovr': ovs_lat,
                   'lon_ovr': ovs_lon,
                   'sim_sar': namespace.sim_sar_geo,
                   'u': namespace.u_geo,
                   'v': namespace.v_geo,
                   'inc': namespace.inc_geo,
                   'psi': namespace.psi_geo,
                   'pix': namespace.pix_geo,
                   'ls_map': namespace.ls_map_geo,
                   'frame': 8,
                   'ls_mode': 2,
                   'logpath': logpath,
                   'shellscript': shellscript,
                   'outdir': outdir}
    out_id = ['DEM_seg_par', 'DEM_seg', 'lookup_table', 'sim_sar',
              'u', 'v', 'inc', 'psi', 'pix', 'ls_map']
    # remove all output files to make sure they are replaced and not updated;
    # the loop variable was renamed from 'id' to avoid shadowing the builtin
    # and the scene object name used throughout this module
    if not exist_ok:
        for key in out_id:
            base = gc_map_args[key]
            if base != '-':
                for suffix in ['', '.par', '.hdr']:
                    fname = base + suffix
                    if os.path.isfile(fname):
                        os.remove(fname)
    if image_par.image_geometry == 'GROUND_RANGE':
        gc_map_args.update({'GRD_par': image + '.par'})
        if do_execute(gc_map_args, out_id, exist_ok):
            diff.gc_map_grd(**gc_map_args)
    else:
        gc_map_args.update({'MLI_par': image + '.par'})
        if do_execute(gc_map_args, out_id, exist_ok):
            # gc_map2 is the successor of gc_map. However, earlier versions
            # did not yet come with full functionality.
            gc_map2_ok = False
            if 'gc_map2' in dir(diff):
                keys = list(gc_map_args.keys())
                keys.remove('ls_mode')
                gc_map2_ok = all([hasarg(diff.gc_map2, x) for x in keys])
            if gc_map2_ok:
                del gc_map_args['ls_mode']
                diff.gc_map2(**gc_map_args)
            else:
                # gc_map might have an argument OFF_par, which is not needed for SLC/MLI geocoding
                if hasarg(diff.gc_map, 'OFF_par'):
                    gc_map_args.update({'OFF_par': '-'})
                diff.gc_map(**gc_map_args)
    # create ENVI header files for all created images
    for item in ['dem_seg_geo', 'sim_sar_geo', 'u_geo', 'v_geo',
                 'psi_geo', 'pix_geo', 'inc_geo', 'ls_map_geo']:
        if namespace.isappreciated(item):
            mods = {'data_type': 1} if item == 'ls_map_geo' else None
            par2hdr(namespace.dem_seg_geo + '.par', namespace.get(item) + '.hdr', mods)
def geocode(scene, dem, tmpdir, outdir, spacing, scaling='linear', func_geoback=1,
            nodata=(0, -99), update_osv=True, osvdir=None, allow_RES_OSV=False,
            cleanup=True, export_extra=None, basename_extensions=None,
            removeS1BorderNoiseMethod='gamma', refine_lut=False, rlks=None, azlks=None,
            s1_osv_url_option=1):
    """
    general function for radiometric terrain correction (RTC) and geocoding of SAR backscatter images with GAMMA.
    Applies the RTC method by :cite:t:`Small2011` to retrieve gamma nought RTC backscatter.
    
    Parameters
    ----------
    scene: str or ~pyroSAR.drivers.ID or list
        the SAR scene(s) to be processed
    dem: str
        the reference DEM in GAMMA format
    tmpdir: str
        a temporary directory for writing intermediate files
    outdir: str
        the directory for the final GeoTIFF output files
    spacing: float or int
        the target pixel spacing in meters
    scaling: str or list[str]
        the value scaling of the backscatter values; either 'linear', 'db' or a list of both, i.e. ['linear', 'db']
    func_geoback: {0, 1, 2, 3, 4, 5, 6, 7}
        backward geocoding interpolation mode (see GAMMA command `geocode_back`)
        
        - 0: nearest-neighbor
        - 1: bicubic spline (default)
        - 2: bicubic-spline, interpolate log(data)
        - 3: bicubic-spline, interpolate sqrt(data)
        - 4: B-spline interpolation (default B-spline degree: 5)
        - 5: B-spline interpolation sqrt(x) (default B-spline degree: 5)
        - 6: Lanczos interpolation (default Lanczos function order: 5)
        - 7: Lanczos interpolation sqrt(x) (default Lanczos function order: 5)
        
        .. note::
            log and sqrt interpolation modes should only be used with non-negative data!
        
        .. note::
            GAMMA recommendation for MLI data: "The interpolation should be performed on
            the square root of the data. A mid-order (3 to 5) B-spline interpolation is recommended."
    nodata: tuple[float or int]
        the nodata values for the output files; defined as a tuple with two values, the first for linear,
        the second for logarithmic scaling
    update_osv: bool
        update the orbit state vectors?
    osvdir: str or None
        a directory for Orbit State Vector files;
        this is currently only used for Sentinel-1 where two subdirectories POEORB and RESORB are created;
        if set to None, a subdirectory OSV is created in the directory of the unpacked scene.
    allow_RES_OSV: bool
        also allow the less accurate RES orbit files to be used?
        Otherwise the function will raise an error if no POE file exists.
    cleanup: bool
        should all files written to the temporary directory during function execution be deleted after processing?
    export_extra: list[str] or None
        a list of image file IDs to be exported to outdir
        
        - format is GeoTIFF if the file is geocoded and ENVI otherwise. Non-geocoded images can be converted via GAMMA
          command data2tiff yet the output was found impossible to read with GIS software
        - scaling of SAR image products is applied as defined by parameter `scaling`
        - see Notes for ID options
    basename_extensions: list[str] or None
        names of additional parameters to append to the basename, e.g. ['orbitNumber_rel']
    removeS1BorderNoiseMethod: str or None
        the S1 GRD border noise removal method to be applied, See :func:`pyroSAR.S1.removeGRDBorderNoise` for details; one of the following:
        
        - 'ESA': the pure implementation as described by ESA
        - 'pyroSAR': the ESA method plus the custom pyroSAR refinement
        - 'gamma': the GAMMA implementation of :cite:`Ali2018`
        - None: do not remove border noise
    refine_lut: bool
        should the LUT for geocoding be refined using pixel area normalization?
    rlks: int or None
        the number of range looks. If not None, overrides the computation done by function
        :func:`pyroSAR.ancillary.multilook_factors` based on the image pixel spacing and the target spacing.
    azlks: int or None
        the number of azimuth looks. Like `rlks`.
    s1_osv_url_option: int
        the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch`
    
    Returns
    -------
    
    Note
    ----
    | intermediate output files
    | DEM products are named <scene ID>_<suffix>, e.g. `S1A__IW___A_20141012T162337_inc_geo`
    | SAR products will additionally contain the polarization, e.g. `S1A__IW___A_20141012T162337_VV_grd_mli`
    | IDs in brackets are only written if selected by `export_extra`
    
    - images in range-Doppler geometry
      
      * **grd**: the ground range detected SAR intensity image
      * **grd_mli**: the multi-looked grd image with approximated target resolution
      * (**pix_ellip_sigma0**): ellipsoid-based pixel area
      * (**pix_area_sigma0**): illuminated area as obtained from integrating DEM-facets in sigma projection (command pixel_area)
      * (**pix_area_gamma0**): illuminated area as obtained from integrating DEM-facets in gamma projection (command pixel_area)
      * **pix_ratio**: pixel area normalization factor (pix_ellip_sigma0 / pix_area_gamma0)
      * **grd_mli_gamma0-rtc**: the terrain-corrected gamma0 backscatter (grd_mli * pix_ratio)
      * (**gs_ratio**): gamma-sigma ratio (pix_gamma0 / pix_sigma0)
    
    - images in map geometry
      
      * **dem_seg_geo**: dem subsetted to the extent of the intersection between input DEM and SAR image
      * (**u_geo**): zenith angle of surface normal vector n (angle between z and n)
      * (**v_geo**): orientation angle of n (between x and projection of n in xy plane)
      * **inc_geo**: local incidence angle (between surface normal and look vector)
      * (**psi_geo**): projection angle (between surface normal and image plane normal)
      * **ls_map_geo**: layover and shadow map
      * (**sim_sar_geo**): simulated SAR backscatter image
      * (**pix_ellip_sigma0_geo**): ellipsoid-based pixel area
      * (**pix_area_sigma0_geo**): illuminated area as obtained from integrating DEM-facets in sigma projection (command pixel_area)
      * (**pix_area_gamma0_geo**): illuminated area as obtained from integrating DEM-facets in gamma projection (command pixel_area)
      * (**pix_ratio_geo**): pixel area normalization factor (pix_ellip_sigma0 / pix_area_gamma0)
      * (**gs_ratio_geo**): gamma-sigma ratio (pix_gamma0 / pix_sigma0)
    
    - additional files
      
      * **lut_init**: initial geocoding lookup table
    
    - files specific to lookup table refinement
      
      * **lut_fine**: refined geocoding lookup table
      * **diffpar**: ISP offset/interferogram parameter file
      * **offs**: offset estimates (fcomplex)
      * **coffs**: culled range and azimuth offset estimates (fcomplex)
      * **coffsets**: culled offset estimates and cross correlation values (text format)
      * **ccp**: cross-correlation of each patch (0.0->1.0) (float)
    
    Examples
    --------
    geocode a Sentinel-1 scene and export the local incidence angle map with it
    
    >>> from pyroSAR.gamma import geocode
    >>> filename = 'S1A_IW_GRDH_1SDV_20180829T170656_20180829T170721_023464_028DE0_F7BD.zip'
    >>> geocode(scene=filename, dem='demfile', outdir='outdir', spacing=20, scaling='db',
    >>>         export_extra=['dem_seg_geo', 'inc_geo', 'ls_map_geo'])
    
    .. figure:: figures/gamma_geocode.svg
        :align: center
        
        Workflow diagram for function geocode for processing a Sentinel-1 Ground Range
        Detected (GRD) scene to radiometrically terrain corrected (RTC) gamma nought backscatter.
    """
    # experimental option to reuse intermediate products; currently affects:
    # - scene unpacking
    # - conversion to GAMMA format
    # - multilooking
    # - DEM product generation
    # - terrain flattening
    exist_ok = False
    
    scenes = scene if isinstance(scene, list) else [scene]
    if len(scenes) > 2:
        raise RuntimeError("currently only one or two scenes can be passed via argument 'scene'")
    scenes = identify_many(scenes)
    # the first scene serves as reference for naming and sensor checks
    ref = scenes[0]
    if ref.sensor not in ['S1A', 'S1B', 'S1C', 'S1D', 'PALSAR-2']:
        raise RuntimeError(
            'this function currently only supports Sentinel-1 and PALSAR-2 Path data. Please stay tuned...')
    if export_extra is not None and not isinstance(export_extra, list):
        raise TypeError("parameter 'export_extra' must either be None or a list")
    
    # create a scene-specific temporary directory
    tmpdir = os.path.join(tmpdir, ref.outname_base(extensions=basename_extensions))
    for dir in [tmpdir, outdir]:
        os.makedirs(dir, exist_ok=True)
    
    if ref.is_processed(outdir):
        log.info('scene {} already processed'.format(ref.outname_base(extensions=basename_extensions)))
        return
    
    shellscript = os.path.join(tmpdir, ref.outname_base(extensions=basename_extensions) + '_commands.sh')
    
    # normalize 'scaling' to a list that is a subset of ['db', 'linear']
    scaling = [scaling] if isinstance(scaling, str) else scaling if isinstance(scaling, list) else []
    scaling = union(scaling, ['db', 'linear'])
    if len(scaling) == 0:
        raise IOError('wrong input type for parameter scaling')
    
    for scene in scenes:
        if scene.compression is not None:
            log.info('unpacking scene')
            try:
                scene.unpack(tmpdir, exist_ok=exist_ok)
            except RuntimeError:
                log.info('scene was attempted to be processed before, exiting')
                return
    
    path_log = os.path.join(tmpdir, 'logfiles')
    if not os.path.isdir(path_log):
        os.makedirs(path_log)
    
    for scene in scenes:
        if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D'] and removeS1BorderNoiseMethod in ['ESA', 'pyroSAR']:
            log.info('removing border noise')
            scene.removeGRDBorderNoise(method=removeS1BorderNoiseMethod)
    
    log.info('converting scene to GAMMA format')
    # the GAMMA border noise removal is applied during conversion
    gamma_bnr = True if removeS1BorderNoiseMethod == 'gamma' else False
    images = []
    for scene in scenes:
        files = convert2gamma(scene, directory=tmpdir, logpath=path_log, outdir=tmpdir,
                              basename_extensions=basename_extensions, shellscript=shellscript,
                              S1_bnr=gamma_bnr, exist_ok=exist_ok, return_fnames=True)
        images.extend(files)
    
    if update_osv:
        for scene in scenes:
            if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D']:
                log.info('updating orbit state vectors')
                if allow_RES_OSV:
                    osvtype = ['POE', 'RES']
                else:
                    osvtype = 'POE'
                try:
                    correctOSV(id=scene, directory=tmpdir, osvdir=osvdir, osvType=osvtype,
                               url_option=s1_osv_url_option,
                               logpath=path_log, outdir=tmpdir, shellscript=shellscript)
                except RuntimeError:
                    msg = 'orbit state vector correction failed for scene {}'
                    log.warning(msg.format(scene.scene))
                    return
    
    log.info('calibrating')
    images_cal = []
    for scene in scenes:
        files = calibrate(id=scene, directory=tmpdir, return_fnames=True,
                          logpath=path_log, outdir=tmpdir, shellscript=shellscript)
        if files is not None:
            images_cal.extend(files)
    # only replace the image list if calibration produced new files
    if len(images_cal) > 0:
        images = images_cal
    
    # mosaic the images of the two scenes per polarization
    if len(scenes) > 1:
        images_new = []
        groups = groupby(images, 'polarization')
        for group in groups:
            out = group[0] + '_cat'
            out_par = out + '.par'
            all_exist = all([os.path.isfile(x) for x in [out, out_par]])
            if not all_exist:
                log.info('mosaicing scenes')
                isp.MLI_cat(MLI1=group[0],
                            MLI1_par=group[0] + '.par',
                            MLI2=group[1],
                            MLI2_par=group[1] + '.par',
                            MLI3=out,
                            MLI3_par=out_par,
                            logpath=path_log, outdir=tmpdir, shellscript=shellscript)
                par2hdr(out_par, out + '.hdr')
            images_new.append(out)
        images = images_new
    
    # NOTE(review): 'scene' here is the leaked loop variable of the preceding
    # loop, i.e. the last scene in the list — confirm this is intended
    if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D']:
        log.info('multilooking')
        groups = groupby(images, 'polarization')
        images = []
        for group in groups:
            out = group[0].replace('IW1', 'IW_') + '_mli'
            infile = group[0] if len(group) == 1 else group
            multilook(infile=infile, outfile=out, spacing=spacing,
                      rlks=rlks, azlks=azlks, exist_ok=exist_ok,
                      logpath=path_log, outdir=tmpdir, shellscript=shellscript)
            images.append(out)
    
    products = list(images)
    reference = images[0]
    
    # create output names for files to be written
    # appreciated files will be written
    n = Namespace(tmpdir, scene.outname_base(extensions=basename_extensions))
    n.appreciate(['dem_seg_geo', 'lut_init', 'inc_geo', 'ls_map_geo'])
    
    pix_geo = []
    if export_extra is not None:
        n.appreciate(export_extra)
        pix = ['pix_area_sigma0', 'pix_area_gamma0', 'pix_ratio', 'gs_ratio', 'pix_ellip_sigma0']
        for item in pix:
            # geocoded pixel-area products require their range-Doppler counterpart
            if item + '_geo' in export_extra:
                pix_geo.append(item + '_geo')
                n.appreciate([item])
    if refine_lut:
        n.appreciate(['pix_area_sigma0'])
    
    reference_par = ISPPar(reference + '.par')
    ######################################################################
    # geocoding and DEM product generation ###############################
    ######################################################################
    log.info('geocoding and creating DEM products')
    gc_map_wrap(image=reference, namespace=n, dem=dem, spacing=spacing, exist_ok=exist_ok,
                logpath=path_log, outdir=tmpdir, shellscript=shellscript)
    sim_width = ISPPar(n.dem_seg_geo + '.par').width
    ######################################################################
    # RTC reference area computation #####################################
    ######################################################################
    log.info('computing pixel area (for radiometric terrain correction, rtc)')
    pixel_area_wrap(image=reference, namespace=n, lut=n.lut_init, exist_ok=exist_ok,
                    logpath=path_log, outdir=tmpdir, shellscript=shellscript)
    ######################################################################
    # lookup table refinement ############################################
    ######################################################################
    lut_final = n.lut_init
    if refine_lut:
        log.info('refining lookup table')
        # Refinement of geocoding lookup table
        diff.create_diff_par(PAR_1=reference + '.par',
                             PAR_2='-',
                             DIFF_par=reference + '_diff.par',
                             PAR_type=1,
                             iflg=0,
                             logpath=path_log,
                             outdir=tmpdir,
                             shellscript=shellscript)
        # Refinement of lookup table
        # for "shift" data offset window size enlarged twice to 512 and 256, for data without shift 256 128
        diff.offset_pwrm(MLI_1=n.pix_area_sigma0,
                         MLI_2=reference,
                         DIFF_par=reference + '_diff.par',
                         offs=reference + '_offs',
                         ccp=reference + '_ccp',
                         rwin=512,
                         azwin=256,
                         offsets=reference + '_offsets.txt',
                         n_ovr=2,
                         nr=64,
                         naz=32,
                         thres=0.2,
                         logpath=path_log,
                         outdir=tmpdir,
                         shellscript=shellscript)
        # par2hdr(master + '.par', master + '_offs' + '.hdr')
        diff.offset_fitm(offs=reference + '_offs',
                         ccp=reference + '_ccp',
                         DIFF_par=reference + '_diff.par',
                         coffs=reference + '_coffs',
                         coffsets=reference + '_coffsets',
                         thres=0.2,
                         npoly=4,
                         logpath=path_log,
                         outdir=tmpdir,
                         shellscript=shellscript)
        # Updating of the look-up table
        diff.gc_map_fine(gc_in=lut_final,
                         width=sim_width,
                         DIFF_par=reference + '_diff.par',
                         gc_out=lut_final + '.fine',
                         ref_flg=1,
                         logpath=path_log,
                         outdir=tmpdir,
                         shellscript=shellscript)
        # Reproduce pixel area estimate
        pixel_area_wrap(image=reference, namespace=n, lut=lut_final + '.fine',
                        logpath=path_log, outdir=tmpdir, shellscript=shellscript)
        lut_final = lut_final + '.fine'
    ######################################################################
    # radiometric terrain correction and back-geocoding ##################
    ######################################################################
    log.info('applying rtc and back-geocoding')
    for image in images:
        # NOTE(review): 'lat' can never be a local of this function, so the
        # fallback branch is always taken — confirm whether a check for the
        # GAMMA LAT module in globals() was intended
        if 'lat' in locals():
            lat.product(data_1=image,
                        data_2=n.pix_ratio,
                        product=image + '_gamma0-rtc',
                        width=reference_par.range_samples,
                        bx=1,
                        by=1,
                        logpath=path_log,
                        outdir=tmpdir,
                        shellscript=shellscript)
        else:
            lat_product(data_in1=image,
                        data_in2=n.pix_ratio,
                        data_out=image + '_gamma0-rtc')
        par2hdr(reference + '.par', image + '_gamma0-rtc.hdr')
        diff.geocode_back(data_in=image + '_gamma0-rtc',
                          width_in=reference_par.range_samples,
                          lookup_table=lut_final,
                          data_out=image + '_gamma0-rtc_geo',
                          width_out=sim_width,
                          interp_mode=func_geoback,
                          logpath=path_log,
                          outdir=tmpdir,
                          shellscript=shellscript)
        par2hdr(n.dem_seg_geo + '.par', image + '_gamma0-rtc_geo.hdr')
        products.extend([image + '_gamma0-rtc', image + '_gamma0-rtc_geo'])
    ######################################################################
    # log scaling and image export #######################################
    ######################################################################
    log.info('conversion to (dB and) GeoTIFF')
    
    def exporter(data_in, outdir, nodata, scale='linear', dtype=2):
        # helper to (optionally) convert an image to dB and export it;
        # geocoded files ('_geo') become GeoTIFF, others are copied as ENVI
        if scale == 'db':
            if re.search('_geo', os.path.basename(data_in)):
                width = sim_width
                refpar = n.dem_seg_geo + '.par'
            else:
                width = reference_par.range_samples
                refpar = reference + '.par'
            # NOTE(review): 'lat' is not a local of 'exporter' either; the
            # fallback is always used — confirm intended check
            if 'lat' in locals():
                lat.linear_to_dB(data_in=data_in,
                                 data_out=data_in + '_db',
                                 width=width,
                                 inverse_flag=0,
                                 null_value=nodata,
                                 logpath=path_log,
                                 outdir=tmpdir,
                                 shellscript=shellscript)
            else:
                lat_linear_to_db(data_in=data_in,
                                 data_out=data_in + '_db')
            par2hdr(refpar, data_in + '_db.hdr')
            data_in += '_db'
        if re.search('_geo', os.path.basename(data_in)):
            outfile = os.path.join(outdir, os.path.basename(data_in) + '.tif')
            disp.data2geotiff(DEM_par=n.dem_seg_geo + '.par',
                              data=data_in,
                              type=dtype,
                              GeoTIFF=outfile,
                              no_data=nodata,
                              logpath=path_log,
                              outdir=tmpdir,
                              shellscript=shellscript)
        else:
            outfile = os.path.join(outdir, os.path.basename(data_in))
            shutil.copyfile(data_in, outfile)
            shutil.copyfile(data_in + '.hdr', outfile + '.hdr')
    
    for image in images:
        for scale in scaling:
            exporter(data_in=image + '_gamma0-rtc_geo', scale=scale, dtype=2,
                     nodata=dict(zip(('linear', 'db'), nodata))[scale], outdir=outdir)
    
    # copy the manifest file of Sentinel-1 scenes to the output directory
    if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D']:
        outname_base = scene.outname_base(extensions=basename_extensions)
        shutil.copyfile(os.path.join(scene.scene, 'manifest.safe'),
                        os.path.join(outdir, outname_base + '_manifest.safe'))
    
    if export_extra is not None:
        log.info('back-geocoding and exporting extra products')
        for key in export_extra:
            # geocode pixel-area products on demand
            if key in pix_geo:
                fname = n.get(key)
                diff.geocode_back(data_in=fname.replace('_geo', ''),
                                  width_in=reference_par.range_samples,
                                  lookup_table=lut_final,
                                  data_out=fname,
                                  width_out=sim_width,
                                  interp_mode=func_geoback,
                                  logpath=path_log,
                                  outdir=tmpdir,
                                  shellscript=shellscript)
                par2hdr(n.dem_seg_geo + '.par', fname + '.hdr')
            # SAR image products
            product_match = [x for x in products if x.endswith(key)]
            if len(product_match) > 0:
                for product in product_match:
                    for scale in scaling:
                        exporter(data_in=product, outdir=outdir, scale=scale, dtype=2,
                                 nodata=dict(zip(('linear', 'db'), nodata))[scale])
            # ancillary (DEM) products
            elif n.isfile(key) and key not in ['lut_init']:
                filename = n[key]
                dtype = 5 if key == 'ls_map_geo' else 2
                nodata = 0
                exporter(filename, outdir, dtype=dtype, nodata=nodata)
            else:
                log.warning('cannot export file {}'.format(key))
    
    shutil.copyfile(shellscript, os.path.join(outdir, os.path.basename(shellscript)))
    
    if cleanup:
        log.info('cleaning up temporary files')
        shutil.rmtree(tmpdir)
def _delete_product(path):
for item in [path, path + '.hdr', path + '.aux.xml']:
if os.path.isfile(item):
os.remove(item)
def lat_linear_to_db(data_in: str, data_out: str) -> None:
    """
    Alternative to LAT module command linear_to_dB.
    
    Converts linear values to decibel (10 * log10). Input values <= 0 are
    treated as invalid and written as 0 in the output. The result is written
    in ENVI format, byte-swapped via `disp.swap_bytes` (swap_type=4), and the
    ENVI header of the input is copied for the output.
    
    Parameters
    ----------
    data_in
        the input data file
    data_out
        the output data file
    """
    scratch = data_out + '_tmp'
    try:
        with Raster(data_in) as ras:
            values = ras.array()
            values[values <= 0] = np.nan  # exclude invalid values from log10
            decibel = 10 * np.log10(values)
            decibel[~np.isfinite(decibel)] = 0  # restore the nodata value
            ras.write(outname=scratch, array=decibel, format='ENVI',
                      nodata=0, dtype='float32')
        # convert the byte order of the written file (GAMMA command swap_bytes)
        disp.swap_bytes(infile=scratch, outfile=data_out, swap_type=4)
        shutil.copy(src=data_in + '.hdr', dst=data_out + '.hdr')
    except Exception:
        _delete_product(data_out)
        raise
    finally:
        _delete_product(scratch)
def lat_product(data_in1: str, data_in2: str, data_out: str) -> None:
    """
    Alternative to LAT module command product.
    
    Computes the pixel-wise product of two images. Zero-valued pixels in either
    input are treated as nodata and written as 0 in the output. The result is
    written in ENVI format, byte-swapped via `disp.swap_bytes` (swap_type=4),
    and the ENVI header of `data_in2` is copied for the output.
    
    Parameters
    ----------
    data_in1
        input data file 1
    data_in2
        input data file 2
    data_out
        the output data file
    """
    scratch = data_out + '_tmp'
    try:
        with Raster(data_in1) as ras:
            factor1 = ras.array()
        factor1[factor1 == 0] = np.nan  # mask nodata of input 1
        with Raster(data_in2) as ras:
            factor2 = ras.array()
            factor2[factor2 == 0] = np.nan  # mask nodata of input 2
            result = factor1 * factor2
            result[~np.isfinite(result)] = 0  # restore the nodata value
            ras.write(outname=scratch, array=result, format='ENVI',
                      nodata=0, dtype='float32')
        # convert the byte order of the written file (GAMMA command swap_bytes)
        disp.swap_bytes(infile=scratch, outfile=data_out, swap_type=4)
        shutil.copy(src=data_in2 + '.hdr', dst=data_out + '.hdr')
    except Exception:
        _delete_product(data_out)
        raise
    finally:
        _delete_product(scratch)
def lat_ratio(data_in1: str, data_in2: str, data_out: str) -> None:
    """
    Alternative to LAT module command ratio.
    
    Computes the pixel-wise ratio data_in1 / data_in2. Zero-valued pixels in
    either input are treated as nodata and written as 0 in the output. The
    result is written in ENVI format, byte-swapped via `disp.swap_bytes`
    (swap_type=4), and the ENVI header of `data_in1` is copied for the output.
    
    Parameters
    ----------
    data_in1
        input data file 1 (numerator)
    data_in2
        input data file 2 (denominator)
    data_out
        the output data file
    """
    scratch = data_out + '_tmp'
    try:
        with Raster(data_in1) as ras:
            numerator = ras.array()
        numerator[numerator == 0] = np.nan  # mask nodata of input 1
        with Raster(data_in2) as ras:
            denominator = ras.array()
            denominator[denominator == 0] = np.nan  # mask nodata of input 2
            result = numerator / denominator
            result[~np.isfinite(result)] = 0  # restore the nodata value
            ras.write(outname=scratch, array=result, format='ENVI',
                      nodata=0, dtype='float32')
        # convert the byte order of the written file (GAMMA command swap_bytes)
        disp.swap_bytes(infile=scratch, outfile=data_out, swap_type=4)
        shutil.copy(src=data_in1 + '.hdr', dst=data_out + '.hdr')
    except Exception:
        _delete_product(data_out)
        raise
    finally:
        _delete_product(scratch)
def multilook(infile, outfile, spacing, rlks=None, azlks=None,
              exist_ok=False, logpath=None, outdir=None, shellscript=None):
    """
    Multilooking of SLC and MLI images.
    
    If the image is in slant range the ground range resolution is computed by dividing the range pixel spacing by
    the sine of the incidence angle.
    The looks in range and azimuth are chosen to approximate the target resolution by rounding the ratio between
    target resolution and ground range/azimuth pixel spacing to the nearest integer.
    An ENVI HDR parameter file is automatically written for better handling in other software.
    
    Parameters
    ----------
    infile: str or list[str]
        one of the following:
        
        - a SAR image in GAMMA format with a parameter file <infile>.par
        - a list of ScanSAR SLC swaths with parameter files <swath>.par and <swath>.tops_par; in this case a text file
          <outfile>_slc-tab.txt will be created, which is passed to the GAMMA command ``multi_look_ScanSAR``
    outfile: str
        the name of the output GAMMA MLI file
    spacing: int
        the target pixel spacing in ground range
    rlks: int or None
        the number of range looks. If not None, overrides the computation done by function
        :func:`pyroSAR.ancillary.multilook_factors` based on the image pixel spacing and the target spacing.
    azlks: int or None
        the number of azimuth looks. Like `rlks`.
    exist_ok: bool
        allow existing output files and do not create new ones?
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the GAMMA commands to in shell format
    
    See Also
    --------
    pyroSAR.ancillary.multilook_factors
    """
    # read the input parameter file(s)
    if isinstance(infile, str):
        par = ISPPar(infile + '.par')
        range_pixel_spacing = par.range_pixel_spacing
        azimuth_pixel_spacing = par.azimuth_pixel_spacing
        incidence_angle = par.incidence_angle
        image_geometry = par.image_geometry
        image_format = par.image_format
    elif isinstance(infile, list):
        # ScanSAR case: average spacings and incidence angle over all swaths;
        # geometry and format are taken from the first swath
        par = [ISPPar(x + '.par') for x in infile]
        range_pixel_spacings = [getattr(x, 'range_pixel_spacing') for x in par]
        range_pixel_spacing = sum(range_pixel_spacings) / len(par)
        azimuth_pixel_spacings = [getattr(x, 'azimuth_pixel_spacing') for x in par]
        azimuth_pixel_spacing = sum(azimuth_pixel_spacings) / len(par)
        incidence_angles = [getattr(x, 'incidence_angle') for x in par]
        incidence_angle = sum(incidence_angles) / len(par)
        image_geometry = par[0].image_geometry
        image_format = par[0].image_format
    else:
        raise TypeError("'infile' must be str or list")
    # derive the look factors from the pixel spacings unless both are user-defined
    if rlks is None and azlks is None:
        rlks, azlks = multilook_factors(source_rg=range_pixel_spacing,
                                        source_az=azimuth_pixel_spacing,
                                        target=spacing,
                                        geometry=image_geometry,
                                        incidence=incidence_angle)
    # reject the case where only one of the two look factors was defined
    if [rlks, azlks].count(None) > 0:
        raise RuntimeError("'rlks' and 'azlks' must either both be integers or None")
    # arguments shared by all GAMMA multilooking commands
    pars = {'rlks': rlks,
            'azlks': azlks,
            'logpath': logpath,
            'shellscript': shellscript,
            'outdir': outdir}
    if image_format in ['SCOMPLEX', 'FCOMPLEX']:
        # multilooking of SLC images
        pars['MLI'] = outfile
        pars['MLI_par'] = outfile + '.par'
        if isinstance(infile, str):
            pars['SLC'] = infile
            pars['SLC_par'] = infile + '.par'
            if do_execute(pars, ['MLI', 'MLI_par'], exist_ok):
                isp.multi_look(**pars)
                par2hdr(outfile + '.par', outfile + '.hdr')
        else:
            # ScanSAR case: write a tab file listing all swaths and their parameter files
            slcpar = [x + '.par' for x in infile]
            topspar = [x + '.tops_par' for x in infile]
            slc_tab = outfile + '_slc-tab.txt'
            if not os.path.isfile(slc_tab) or not exist_ok:
                with open(slc_tab, 'w') as tab:
                    for item in zip(infile, slcpar, topspar):
                        tab.write(' '.join(item) + '\n')
            pars['SLC_tab'] = slc_tab
            if do_execute(pars, ['MLI', 'MLI_par'], exist_ok):
                # prefer multi_look_ScanSAR if available, otherwise fall back to multi_S1_TOPS
                if 'multi_look_ScanSAR' in dir(isp):
                    isp.multi_look_ScanSAR(**pars)
                else:
                    isp.multi_S1_TOPS(**pars)
                par2hdr(outfile + '.par', outfile + '.hdr')
    else:
        # multilooking of MLI images
        pars['MLI_in'] = infile
        pars['MLI_in_par'] = infile + '.par'
        pars['MLI_out'] = outfile
        pars['MLI_out_par'] = outfile + '.par'
        if do_execute(pars, ['MLI_out', 'MLI_out_par'], exist_ok):
            isp.multi_look_MLI(**pars)
            par2hdr(outfile + '.par', outfile + '.hdr')
def ovs(parfile, spacing):
    """
    compute DEM oversampling factors for a target resolution in meters
    
    Parameters
    ----------
    parfile: str
        a GAMMA DEM parameter file
    spacing: int or float
        the target pixel spacing in meters
    
    Returns
    -------
    tuple of float
        the oversampling factors for latitude and longitude
    """
    dempar = ISPPar(parfile)
    if hasattr(dempar, 'post_north'):
        # projected DEM: postings are already metric
        post_north, post_east = (abs(float(v)) for v in
                                 (dempar.post_north, dempar.post_east))
    else:
        # geographic DEM: postings are angular and need conversion to meters
        res_lat, res_lon = (abs(float(v)) for v in
                            (dempar.post_lat, dempar.post_lon))
        # coordinate of the DEM center
        lat = float(dempar.corner_lat) - (res_lat * (dempar.nlines // 2))
        lon = float(dempar.corner_lon) + (res_lon * (dempar.width // 2))
        # great-circle distance of one posting at the DEM center
        post_north = haversine(lat, lon, lat + res_lat, lon)
        post_east = haversine(lat, lon, lat, lon + res_lon)
    # ratio of DEM posting to target spacing per axis
    return post_north / spacing, post_east / spacing
def pixel_area_wrap(image, namespace, lut, exist_ok=False,
                    logpath=None, outdir=None, shellscript=None):
    """
    helper function for computing pixel_area files in function geocode.
    
    Parameters
    ----------
    image: str
        the reference SAR image
    namespace: pyroSAR.gamma.auxil.Namespace
        an object collecting all output file names
    lut: str
        the name of the lookup table
    exist_ok: bool
        allow existing output files and do not create new ones?
    logpath: str
        a directory to write command logfiles to
    outdir: str
        the directory to execute the command in
    shellscript: str
        a file to write the GAMMA commands to in shell format
    
    Returns
    -------
    
    """
    image_par = ISPPar(image + '.par')
    # the gamma-sigma ratio requires both DEM-facet pixel area images
    if namespace.isappreciated('gs_ratio'):
        namespace.appreciate(['pix_area_sigma0', 'pix_area_gamma0'])
    pixel_area_args = {'MLI_par': image + '.par',
                       'DEM_par': namespace.dem_seg_geo + '.par',
                       'DEM': namespace.dem_seg_geo,
                       'lookup_table': lut,
                       'ls_map': namespace.ls_map_geo,
                       'inc_map': namespace.inc_geo,
                       'pix_sigma0': namespace.pix_area_sigma0,
                       'pix_gamma0': namespace.pix_area_gamma0,
                       'logpath': logpath,
                       'outdir': outdir,
                       'shellscript': shellscript}
    radcal_mli_args = {'MLI': image,
                       'MLI_par': image + '.par',
                       'OFF_par': '-',
                       'CMLI': image + '_cal',
                       'refarea_flag': 1,  # calculate sigma0, scale area by sin(inc_ang)/sin(ref_inc_ang)
                       'pix_area': namespace.pix_ellip_sigma0,
                       'logpath': logpath,
                       'outdir': outdir,
                       'shellscript': shellscript}
    # newer versions of GAMMA enable creating the ratio of ellipsoid-based
    # pixel area and DEM-facet pixel area directly with command pixel_area
    if hasarg(diff.pixel_area, 'sig2gam_ratio'):
        namespace.appreciate(['pix_ratio'])
        pixel_area_args['sig2gam_ratio'] = namespace.pix_ratio
        if do_execute(pixel_area_args, ['pix_sigma0', 'pix_gamma0', 'sig2gam_ratio'], exist_ok):
            diff.pixel_area(**pixel_area_args)
        if namespace.isappreciated('pix_ellip_sigma0'):
            if do_execute(radcal_mli_args, ['pix_area'], exist_ok):
                isp.radcal_MLI(**radcal_mli_args)
                par2hdr(image + '.par', image + '_cal.hdr')
    else:
        # sigma0 = MLI * ellip_pix_sigma0 / pix_area_sigma0
        # gamma0 = MLI * ellip_pix_sigma0 / pix_area_gamma0
        namespace.appreciate(['pix_area_gamma0', 'pix_ellip_sigma0', 'pix_ratio'])
        pixel_area_args['pix_gamma0'] = namespace.pix_area_gamma0
        radcal_mli_args['pix_area'] = namespace.pix_ellip_sigma0
        # actual illuminated area as obtained from integrating DEM-facets (pix_area_sigma0 | pix_area_gamma0)
        if do_execute(pixel_area_args, ['pix_sigma0', 'pix_gamma0'], exist_ok):
            diff.pixel_area(**pixel_area_args)
        # ellipsoid-based pixel area (ellip_pix_sigma0)
        if do_execute(radcal_mli_args, ['pix_area'], exist_ok):
            isp.radcal_MLI(**radcal_mli_args)
            par2hdr(image + '.par', image + '_cal.hdr')
        if os.path.isfile(image + '.hdr'):
            for item in ['pix_area_sigma0', 'pix_area_gamma0', 'pix_ellip_sigma0']:
                if namespace.isappreciated(item):
                    hdr_out = namespace[item] + '.hdr'
                    c1 = not os.path.isfile(hdr_out)
                    c2 = os.path.isfile(hdr_out) and not exist_ok
                    if c1 or c2:
                        shutil.copy(src=image + '.hdr', dst=hdr_out)
        # ratio of ellipsoid-based pixel area and DEM-facet pixel area
        c1 = not os.path.isfile(namespace.pix_ratio)
        c2 = os.path.isfile(namespace.pix_ratio) and not exist_ok
        if c1 or c2:
            # NOTE(review): 'lat' can never be a local of this function, so the
            # fallback branch is always taken — confirm whether a check for the
            # GAMMA LAT module in globals() was intended
            if 'lat' in locals():
                lat.ratio(d1=namespace.pix_ellip_sigma0,
                          d2=namespace.pix_area_gamma0,
                          ratio=namespace.pix_ratio,
                          width=image_par.range_samples,
                          bx=1,
                          by=1,
                          logpath=logpath,
                          outdir=outdir,
                          shellscript=shellscript)
            else:
                for item in ['pix_area_gamma0', 'pix_ellip_sigma0']:
                    par2hdr(image + '.par', namespace[item] + '.hdr')
                lat_ratio(data_in1=namespace.pix_ellip_sigma0,
                          data_in2=namespace.pix_area_gamma0,
                          data_out=namespace.pix_ratio)
    # gamma-sigma ratio (pix_area_gamma0 / pix_area_sigma0)
    if namespace.isappreciated('gs_ratio'):
        c1 = not os.path.isfile(namespace.gs_ratio)
        c2 = os.path.isfile(namespace.gs_ratio) and not exist_ok
        if c1 or c2:
            # NOTE(review): see note above on 'lat' in locals()
            if 'lat' in locals():
                lat.ratio(d1=namespace.pix_area_gamma0,
                          d2=namespace.pix_area_sigma0,
                          ratio=namespace.gs_ratio,
                          width=image_par.range_samples,
                          bx=1,
                          by=1,
                          logpath=logpath,
                          outdir=outdir,
                          shellscript=shellscript)
            else:
                for item in ['pix_area_gamma0', 'pix_area_sigma0']:
                    par2hdr(image + '.par', namespace[item] + '.hdr')
                lat_ratio(data_in1=namespace.pix_area_gamma0,
                          data_in2=namespace.pix_area_sigma0,
                          data_out=namespace.gs_ratio)
    # create ENVI header files for all created pixel area images
    for item in ['pix_area_sigma0', 'pix_area_gamma0',
                 'pix_ratio', 'pix_ellip_sigma0', 'gs_ratio']:
        if namespace.isappreciated(item):
            hdr_out = namespace[item] + '.hdr'
            # bug fix: this previously tested os.path.isfile(item); 'item' is a
            # product key, not a file path, so the check was always True and
            # headers were rewritten even with exist_ok=True
            c1 = not os.path.isfile(hdr_out)
            c2 = os.path.isfile(hdr_out) and not exist_ok
            if c1 or c2:
                par2hdr(image + '.par', hdr_out)
def S1_deburst(burst1, burst2, burst3, name_out, rlks=5, azlks=1,
               replace=False, logpath=None, outdir=None, shellscript=None):
    """
    Debursting of Sentinel-1 SLC imagery in GAMMA
    
    The procedure consists of two steps. First antenna pattern deramping and
    then mosaicing of the single deramped bursts.
    For mosaicing, the burst boundaries are calculated from the number of looks in range (`rlks`)
    and azimuth (`azlks`), in this case 5 range looks and 1 azimuth looks.
    Alternately 10 range looks and 2 azimuth looks could be used.
    
    Parameters
    ----------
    burst1: str
        burst image 1
    burst2: str
        burst image 2
    burst3: str
        burst image 3
    name_out: str
        the name of the output file
    rlks: int
        the number of looks in range
    azlks: int
        the number of looks in azimuth
    replace: bool
        replace the burst images by the new file? If True, the three burst images will be deleted.
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format
    
    Returns
    -------
    
    """
    # each burst needs a data file plus .par and .tops_par parameter files
    for burst in [burst1, burst2, burst3]:
        if not os.path.isfile(burst) or not os.path.isfile(burst + '.par') or not os.path.isfile(burst + '.tops_par'):
            # NOTE(review): the '{burst1}' placeholders are literal (this is not
            # an f-string) — confirm whether generic placeholders were intended
            raise IOError('input files missing; parameter files must be named e.g. {burst1}.par and {burst1}.tops_par')
    outpath = os.path.dirname(name_out)
    if not os.path.isdir(outpath):
        os.makedirs(outpath)
    # tab files listing the input bursts and the deramped ('_drp') outputs
    tab_in = os.path.join(outpath, 'tab_deramp1')
    tab_out = os.path.join(outpath, 'tab_deramp2')
    with open(tab_in, 'w') as out1:
        with open(tab_out, 'w') as out2:
            for item in [burst1, burst2, burst3]:
                out1.write(item + '\t' + item + '.par\t' + item + '.tops_par\n')
                out2.write(item + '_drp\t' + item + '_drp.par\t' + item + '_drp.tops_par\n')
    # step 1: antenna pattern deramping
    isp.SLC_deramp_ScanSAR(SLC1_tab=tab_in,
                           SLC2_tab=tab_out,
                           mode=0,
                           phflg=0,
                           logpath=logpath,
                           outdir=outdir,
                           shellscript=shellscript)
    # step 2: burst mosaicing; prefer the newer command name if available
    new = 'SLC_mosaic_ScanSAR'
    old = 'SLC_mosaic_S1_TOPS'
    slc_mosaic = new if hasattr(isp, new) else old
    getattr(isp, slc_mosaic)(SLC_tab=tab_out,
                             SLC=name_out,
                             SLC_par=name_out + '.par',
                             rlks=rlks,
                             azlks=azlks,
                             logpath=logpath,
                             outdir=outdir,
                             shellscript=shellscript)
    # optionally delete the original burst images
    if replace:
        for item in [burst1, burst2, burst3]:
            for subitem in [item + x for x in ['', '.par', '.tops_par']]:
                os.remove(subitem)
    # the deramped intermediates and the tab files are removed unconditionally
    for item in [burst1, burst2, burst3]:
        for subitem in [item + x for x in ['_drp', '_drp.par', '_drp.tops_par']]:
            os.remove(subitem)
    os.remove(tab_in)
    os.remove(tab_out)
================================================
FILE: pyroSAR/install/download_egm96_15.gtx.sh
================================================
#!/usr/bin/env bash
# download EGM96 geoid model to convert heights with GDAL
# abort if the PROJ data directory cannot be entered (ShellCheck SC2164)
cd /usr/share/proj || exit 1
sudo wget https://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx
sudo chmod 644 egm96_15.gtx
================================================
FILE: pyroSAR/install/download_testdata.sh
================================================
#!/usr/bin/env bash
# download test data for the pyroSAR test suite into $TESTDATA_DIR
# variable expansions are quoted to survive paths with spaces (ShellCheck SC2086)
mkdir -p "$TESTDATA_DIR"
#cd $TESTDATA_DIR
echo "Start Download forest_brazil"
wget --quiet -P "$TESTDATA_DIR" 'ftp://ftp.eorc.jaxa.jp/pub/ALOS-2/1501sample/310_forestbrazil/0000022708_001001_ALOS2015976960-140909.zip'
echo "End download forest_brazil"
================================================
FILE: pyroSAR/install/install_deps.sh
================================================
#!/usr/bin/env bash
##############################################################
# manual installation of pyroSAR dependencies
# GDAL, GEOS, PROJ, SpatiaLite
# John Truckenbrodt, Rhys Kidd 2017-2019
##############################################################
# define a root directory for downloading packages
root=$HOME/test
# define a directory for download and unpacked packages
downloaddir=${root}/originals
packagedir=${root}/packages
# define the installation directory; This needs to be outside of the root directory so that the latter can be deleted in the end.
# In case installdir is set to a location outside of /usr/*, the following installation commands do not need to be run with
# administration rights (sudo)
#installdir=/usr/local
installdir=$HOME/local
# the version of GDAL and its dependencies
GDALVERSION=3.0.1
# these versions are not quite as important. If you have already installed them you might need to define their location
# for the configuration of GDAL
geos_version=3.7.2
proj_version=6.1.1
spatialite_version=4.3.0
# define the number of threads for compilation
threads=2
########################################################################################################################
# setup environment variables and create directories
if [[ -d "${root}" ]]; then
    if [[ "$(ls -A "${root}")" ]]; then
        echo "Error! root already exists. Please choose a fresh directory which can be deleted once finished" 1>&2
        #exit 64
    fi
fi
export PATH=${installdir}/bin:$PATH
export LD_LIBRARY_PATH=${installdir}/lib:$LD_LIBRARY_PATH
for dir in "${root}" "${downloaddir}" "${packagedir}" "${installdir}"; do
    mkdir -p "${dir}"
done
########################################################################################################################
# download GDAL and its dependencies
declare -a remotes=(
    "https://download.osgeo.org/gdal/$GDALVERSION/gdal-$GDALVERSION.tar.gz"
    "https://download.osgeo.org/geos/geos-$geos_version.tar.bz2"
    "https://download.osgeo.org/proj/proj-$proj_version.tar.gz"
    "https://www.gaia-gis.it/gaia-sins/libspatialite-sources/libspatialite-$spatialite_version.tar.gz"
)
for package in "${remotes[@]}"; do
    # -nc: skip the download if the archive already exists locally
    wget "${package}" -nc -P "${downloaddir}"
done
########################################################################################################################
# unpack downloaded archives
for package in "${downloaddir}"/*tar.gz; do
    tar xfvz "${package}" -C "${packagedir}"
done
for package in "${downloaddir}"/*tar.bz2; do
    tar xfvj "${package}" -C "${packagedir}"
done
########################################################################################################################
# install GEOS
# abort if the source directory is missing so 'make install' never runs in the wrong place
cd "${packagedir}"/geos* || exit 1
./configure --prefix="${installdir}"
make -j"${threads}"
sudo make install
########################################################################################################################
# install PROJ
cd "${packagedir}"/proj* || exit 1
./configure --prefix="${installdir}"
make -j"${threads}"
sudo make install
########################################################################################################################
# install spatialite
cd "${packagedir}"/libspatialite* || exit 1
# PROJ now uses a new API, using the old deprecated one (as done by spatialite) needs to be indicated explicitly
./configure --prefix="${installdir}" \
    CFLAGS=-DACCEPT_USE_OF_DEPRECATED_PROJ_API_H
make -j"${threads}"
sudo make install
########################################################################################################################
# install GDAL
# please check the output of configure to make sure that the GEOS and PROJ drivers are enabled
# otherwise you might need to define the locations of the packages
python_bin=/usr/bin/python3.6
cd "${packagedir}"/gdal* || exit 1
./configure --prefix="${installdir}" \
    --with-python="${python_bin}" \
    --with-geos="${installdir}"/bin/geos-config \
    --with-proj="${installdir}" \
    --with-spatialite="${installdir}"
make -j"${threads}"
sudo make install
########################################################################################################################
# install GDAL Python binding inside a virtual environment
python -m pip install gdal=="$GDALVERSION" --global-option=build_ext --user --global-option="-I$installdir/include"
########################################################################################################################
########################################################################################################################
# install pysqlite2 python package with static sqlite3 build
# this needs git to be installed
cd "${packagedir}" || exit 1
git clone https://github.com/ghaering/pysqlite.git
cd pysqlite || exit 1
wget https://sqlite.org/2019/sqlite-amalgamation-3290000.zip
unzip sqlite-amalgamation-3290000.zip
cp sqlite-amalgamation-3290000/* .
sudo python setup.py build_static install --prefix="${installdir}"
########################################################################################################################
########################################################################################################################
# finishing the process
echo "depending on your choice of installdir and Python version you might need to add the following lines to your .bashrc:"
# \$ prints a literal dollar sign so the user can paste the lines verbatim
echo "export PATH=${installdir}/bin:\$PATH"
echo "export LD_LIBRARY_PATH=${installdir}/lib:\$LD_LIBRARY_PATH"
echo "export PYTHONPATH=${installdir}/lib64/python3.6/site-packages:\$PYTHONPATH"
echo "done"
# deleting the root directory which is no longer needed
sudo rm -rf "${root}"
================================================
FILE: pyroSAR/patterns.py
================================================
###############################################################################
# Reading and Organizing system for SAR images
# Copyright (c) 2016-2023, the pyroSAR Developers.
# This file is part of the pyroSAR Project. It is subject to the
# license terms in the LICENSE.txt file found in the top-level
# directory of this distribution and at
# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
# No part of the pyroSAR project, including this file, may be
# copied, modified, propagated, or distributed except according
# to the terms contained in the LICENSE.txt file.
###############################################################################
"""
This file contains regular expressions to identify SAR products.
The pattern 'pyrosar' identifies products in pyroSAR's unified naming scheme.
The names of all other expressions correspond to the classes found in pyroSAR.drivers.
"""
# pyroSAR's unified naming scheme, e.g.
# S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif
# A leading directory path is tolerated and ignored.
# NOTE(review): named-group identifiers were stripped by extraction
# ((?P...) is invalid regex); restored here — verify against pyroSAR.ancillary.parse_datasetname
pyrosar = r'(?:.*[/\\]|)' \
          r'(?P<outname_base>' \
          r'(?P<sensor>[A-Z0-9]{1,4})_+' \
          r'(?P<acquisition_mode>[A-Z0-9]{1,4})_+' \
          r'(?P<orbit>[AD])_' \
          r'(?P<start>[0-9T]{15})' \
          r'(?:_(?P<extensions>\w*?)|)' \
          r')_*' \
          r'(?:(?P<polarization>[HV]{2})_' \
          r'(?P<proc_steps>[\w-]*)|)' \
          r'(?P<filetype>(?:.tif|.nc|))$'
# ERS-1/2 and ENVISAT ASAR products in CEOS format, e.g.
# SAR_IMP_1PXDLR19920604_082148_00000017C083_00108_04741_8074.E1
# NOTE(review): named-group identifiers were stripped by extraction;
# restored here — verify against pyroSAR.drivers.CEOS_ERS
ceos_ers = r'(?P<product_id>(?:SAR|ASA)_(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_))_[012B][CP])' \
           r'(?P<processing_stage_flag>[A-Z])' \
           r'(?P<originator_ID>[A-Z\-]{3})' \
           r'(?P<start_day>[0-9]{8})_' \
           r'(?P<start_time>[0-9]{6})_' \
           r'(?P<duration>[0-9]{8})' \
           r'(?P<phase>[0-9A-Z]{1})' \
           r'(?P<cycle>[0-9]{3})_' \
           r'(?P<relative_orbit>[0-9]{5})_' \
           r'(?P<absolute_orbit>[0-9]{5})_' \
           r'(?P<counter>[0-9]{4,})\.' \
           r'(?P<satellite_ID>[EN][12])' \
           r'(?P<extension>(?:\.zip|\.tar\.gz|\.PS|))$'
# ALOS-1 PALSAR leader files in CEOS format, e.g.
# LED-ALPSRP234160910-H1.1__A
# NOTE(review): named-group identifiers were stripped by extraction;
# restored here — verify against pyroSAR.drivers.CEOS_PSR
ceos_psr1 = r'^LED-ALPSR' \
            r'(?P<sub>P|S)' \
            r'(?P<orbit>[0-9]{5})' \
            r'(?P<frame>[0-9]{4})-' \
            r'(?P<mode>[HWDPC])' \
            r'(?P<level>1\.[015])' \
            r'(?P<proc>G|_)' \
            r'(?P<proj>[UPML_])' \
            r'(?P<orbit_dir>A|D)$'
# ALOS-2 PALSAR-2 leader files in CEOS format, e.g.
# LED-ALOS2031342900-141205-FBDR1.5RUA
# NOTE(review): named-group identifiers were stripped by extraction;
# restored here — verify against pyroSAR.drivers.CEOS_PSR
ceos_psr2 = r'^LED-ALOS2' \
            r'(?P<orbit>[0-9]{5})' \
            r'(?P<frame>[0-9]{4})-' \
            r'(?P<date>[0-9]{6})-' \
            r'(?P<mode>SBS|UBS|UBD|HBS|HBD|HBQ|FBS|FBD|FBQ|WBS|WBD|WWS|WWD|VBS|VBD)' \
            r'(?P<look_dir>L|R)' \
            r'(?P<level>1\.0|1\.1|1\.5|2\.1|3\.1)' \
            r'(?P<proc>[GR_])' \
            r'(?P<proj>[UPML_])' \
            r'(?P<orbit_dir>A|D)$'
# JAXA EORC PALSAR-2 ScanSAR products (HDR summary files)
# NOTE(review): named-group identifiers were stripped by extraction; the names
# below are reconstructed and MUST be verified against pyroSAR.drivers.EORC_PSR —
# consumers read these patterns via groupdict(), so the exact names matter
eorc_psr = r'^PSR2-' \
           r'(?P<prod>SLTR)_' \
           r'(?P<pathNr>RSP[0-9]{3})_' \
           r'(?P<date>[0-9]{8})' \
           r'(?P<mode>FBD|WBD)' \
           r'(?P<beamNr>[0-9]{2})' \
           r'(?P<orbit_dir>A|D)' \
           r'(?P<look_dir>L|R)_' \
           r'(?P<prodID>[0-9A-Z]{16})-' \
           r'(?P<sceneID>[0-9A-Z]{5})_' \
           r'(?P<counter>[0-9]{3})_' \
           r'HDR$'
# ERS-1/2 and ENVISAT ASAR products in ESA's own format; identical to
# ceos_ers except that there is no trailing extension group and no end anchor
# NOTE(review): named-group identifiers were stripped by extraction;
# restored here — verify against pyroSAR.drivers.ESA
esa = r'(?P<product_id>(?:SAR|ASA)_(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_))_[012B][CP])' \
      r'(?P<processing_stage_flag>[A-Z])' \
      r'(?P<originator_ID>[A-Z\-]{3})' \
      r'(?P<start_day>[0-9]{8})_' \
      r'(?P<start_time>[0-9]{6})_' \
      r'(?P<duration>[0-9]{8})' \
      r'(?P<phase>[0-9A-Z]{1})' \
      r'(?P<cycle>[0-9]{3})_' \
      r'(?P<relative_orbit>[0-9]{5})_' \
      r'(?P<absolute_orbit>[0-9]{5})_' \
      r'(?P<counter>[0-9]{4,})\.' \
      r'(?P<satellite_ID>[EN][12])'
# Sentinel-1 SAFE product names, e.g.
# S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.SAFE
# NOTE(review): named-group identifiers were stripped by extraction;
# restored here — verify against pyroSAR.drivers.SAFE
safe = r'^(?P<sensor>S1[ABCD])_' \
       r'(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_' \
       r'(?P<product>SLC|GRD|OCN)' \
       r'(?P<resolution>F|H|M|_)_' \
       r'(?P<processingLevel>1|2)' \
       r'(?P<category>S|A)' \
       r'(?P<pols>SH|SV|DH|DV|VV|HH|HV|VH)_' \
       r'(?P<start>[0-9]{8}T[0-9]{6})_' \
       r'(?P<stop>[0-9]{8}T[0-9]{6})_' \
       r'(?P<orbitNumber>[0-9]{6})_' \
       r'(?P<dataTakeID>[0-9A-F]{6})_' \
       r'(?P<productIdentifier>[0-9A-F]{4})' \
       r'\.SAFE$'
# TerraSAR-X / TanDEM-X SAR products, e.g.
# TSX1_SAR__MGD_SE___SM_S_SRA_20090212T204239_20090212T204247
# NOTE(review): named-group identifiers were stripped by extraction;
# restored here — verify against pyroSAR.drivers.TSX
tsx = r'^(?P<sat>T[DS]X1)_SAR__' \
      r'(?P<prod>SSC|MGD|GEC|EEC)_' \
      r'(?P<var>____|SE__|RE__|MON1|MON2|BTX1|BRX2)_' \
      r'(?P<mode>SM|SL|HS|HS300|ST|SC)_' \
      r'(?P<pols>[SDTQ])_' \
      r'(?:SRA|DRA)_' \
      r'(?P<start>[0-9]{8}T[0-9]{6})_' \
      r'(?P<stop>[0-9]{8}T[0-9]{6})(?:\.xml|)$'
tdm = r'^(?PT[D]M1)_SAR__' \
r'(?PCOS)_' \
r'(?P