[
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "content": "---\nname: Bug report\nabout: Create a report to help us improve\ntitle: ''\nlabels: bug\nassignees: ''\n\n---\n\n- which operating system are you using?  \ne.g. Windows 10, Ubuntu 18.4, etc.\n- which environment is pyroSAR running in?  \ne.g. system-wide Python installation, Anaconda environment, virtual environment, etc.\n- which version of pyroSAR are you using?  \none installed via conda, pip or a clone of the GitHub repository?\n-  which function of pyroSAR did you call with which parameters?  \n- if applicable, which version of SNAP or GAMMA are you using in pyroSAR?\n- the full error message\n"
  },
  {
    "path": ".github/workflows/conda-install.yml",
    "content": "name: conda build\n\non:\n  push:\n    branches: [ main ]\n  pull_request:\n    branches: [ main ]\n    types: [ opened, reopened, synchronize ]\n  workflow_dispatch:\n    inputs:\n      debug_enabled:\n        type: boolean\n        description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'\n        required: false\n        default: false\n\npermissions:\n  contents: read\n\nenv:\n  SNAP_VERSION: \"13.0\"\n\njobs:\n  build-linux:\n    runs-on: ubuntu-latest\n    defaults:\n      run:\n        shell: micromamba-shell {0}\n    services:\n      postgres:\n        image: postgis/postgis:16-3.4\n        env:\n          POSTGRES_PASSWORD: Password12!\n        ports:\n          - 5432:5432\n    steps:\n      - uses: actions/checkout@v3\n      - name: Set up python environment\n        uses: mamba-org/setup-micromamba@v2\n        with:\n          environment-file: environment-dev.yml\n          cache-environment: true\n          init-shell: bash\n          generate-run-shell: true\n          post-cleanup: 'all'\n      - name: Install ESA SNAP\n        run: |\n          wget -nv https://download.esa.int/step/snap/$SNAP_VERSION/installers/esa-snap_all_linux-$SNAP_VERSION.0.sh\n          bash esa-snap_all_linux-$SNAP_VERSION.0.sh -q -dir $GITHUB_ACTION_PATH/esa-snap\n      - name: Set paths and variables\n        run: |\n          echo \"$CONDA/bin\" >> $GITHUB_PATH\n          echo \"$GITHUB_ACTION_PATH/esa-snap/bin\" >> $GITHUB_PATH\n          echo \"PROJ_DATA=$CONDA/share/proj\" >> $GITHUB_ENV\n      - name: Lint with flake8\n        run: |\n          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics\n          flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics\n      - name: Install pyroSAR\n        run: |\n          pip install .\n      - name: Test with pytest\n        run: |\n          coverage run -m pytest\n          coverage xml\n        env:\n          PGUSER: postgres\n          PGPASSWORD: Password12!\n      - name: Publish to coveralls.io\n        if: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}\n        uses: coverallsapp/github-action@v2.3.0\n        with:\n          github-token: ${{ github.token }}\n          format: cobertura\n\n  build-windows:\n    runs-on: windows-latest\n    steps:\n      - uses: actions/checkout@v6\n      - name: Set up micromamba environment\n        uses: mamba-org/setup-micromamba@v2\n        with:\n          environment-file: environment-dev.yml\n          cache-environment: true\n          init-shell: bash\n          generate-run-shell: true\n          post-cleanup: 'all'\n\n      - uses: nyurik/action-setup-postgis@v2.2\n        with:\n          cached-dir: C:\\downloads\n          postgres-version: 17\n\n      # ---------------- SNAP cache ----------------\n      - name: Cache SNAP zip\n        id: cache-snap\n        uses: actions/cache@v4\n        with:\n          path: |\n            snap.zip\n          key: snap-${{ env.SNAP_VERSION }}-windows\n\n      - name: Download and install SNAP (cache miss)\n        if: steps.cache-snap.outputs.cache-hit != 'true'\n        shell: cmd\n        run: |\n          echo Downloading SNAP installer...\n          curl -L -o snap.exe https://download.esa.int/step/snap/%SNAP_VERSION%/installers/esa-snap_all_windows-%SNAP_VERSION%.0.exe\n\n          echo Installing SNAP...\n          start /wait snap.exe -q -dir C:\\esa-snap\n          \n          echo Creating zip archive for cache...\n          powershell Compress-Archive -Path C:\\esa-snap -DestinationPath snap.zip\n\n      - name: Restore SNAP from zip 
(cache hit)\n        if: steps.cache-snap.outputs.cache-hit == 'true'\n        shell: powershell\n        run: |\n          Write-Host \"Unzipping cached SNAP...\"\n          Expand-Archive snap.zip C:\\\n\n      - name: Add SNAP to PATH\n        shell: powershell\n        run: |\n          echo \"C:\\esa-snap\\bin\" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append\n\n      # ---------------- Python steps (micromamba env) ----------------\n\n      - name: Verify micromamba python\n        shell: bash -el {0}\n        run: |\n          where python\n          python -V\n          where pip\n\n      - name: Install pyroSAR\n        shell: bash -el {0}\n        run: |\n          python -m pip install .\n\n      - name: Setup tmate session (debug)\n        uses: mxschmitt/action-tmate@v3\n        if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}\n\n      - name: Test with pytest\n        shell: bash -el {0}\n        run: |\n          pytest -vv\n        env:\n          PGUSER: postgres\n          PGPASSWORD: postgres\n"
  },
  {
    "path": ".github/workflows/python-publish.yml",
    "content": "# This workflow will upload a Python Package using Twine when a release is created\n# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries\n\n# This workflow uses actions that are not certified by GitHub.\n# They are provided by a third-party and are governed by\n# separate terms of service, privacy policy, and support\n# documentation.\n\nname: Upload Python Package\n\non:\n  release:\n    types: [ published ]\n\npermissions:\n  contents: read\n\njobs:\n  deploy:\n\n    runs-on: ubuntu-latest\n\n    steps:\n    - uses: actions/checkout@v3\n    - name: Set up Python\n      uses: actions/setup-python@v3\n      with:\n        python-version: '3.x'\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install build\n    - name: Build package\n      run: python -m build\n    - name: Publish package\n      uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29\n      with:\n        user: __token__\n        password: ${{ secrets.PYPI_API_TOKEN }}\n"
  },
  {
    "path": ".gitignore",
    "content": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n.hypothesis/\n.pytest_cache/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n.python-version\n\n# celery beat schedule file\ncelerybeat-schedule\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n.idea/\nout/\ndev_*\n\n# OSX tempfiles\n.DS_Store"
  },
  {
    "path": ".travis.yml",
    "content": "dist: jammy\nlanguage: python\nsudo: required\ncache:\n  directories:\n    - ~/.cache/pip\n\nenv:\n  global:\n    - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels\n    - PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels\n    - TESTDATA_DIR=$HOME/testdata\n    - PGUSER=travis\n    - PGPASSWORD=Password12!\n    - SNAP_VERSION=10\n\naddons:\n  postgresql: '14'\n  apt:\n    sources:\n      - sourceline: 'ppa:ubuntugis/ppa'\n    packages:\n      - libgdal-dev\n      - gdal-bin\n      - libsqlite3-mod-spatialite\n      - libproj-dev\n      - python3-dev\n      - postgresql-14-postgis-3\n\nservices:\n  - postgresql\n\npython:\n  - '3.10'\n\nbefore_install:\n  - export SNAP_INSTALLER=esa-snap_sentinel_linux-\"$SNAP_VERSION\".0.0.sh\n  - wget -O $SNAP_INSTALLER https://download.esa.int/step/snap/\"$SNAP_VERSION\"_0/installers/\"$SNAP_INSTALLER\"\n  - bash $SNAP_INSTALLER -q\n  - export PATH=$PATH:/opt/snap/bin\n\ninstall:\n  - mkdir -p ~/.cache/pip/wheels # remove warning \"Url 'file:///home/travis/.cache/pip/wheels' is ignored: it is neither a file nor a directory.\"\n  - pip install --ignore-installed setuptools pip six certifi # install packages inside the venv if the system version is too old\n  - pip install numpy\n  - pip install GDAL==$(gdal-config --version) --global-option=build_ext --global-option=\"$(gdal-config --cflags)\"\n  - pip install coveralls coverage\n  - pip install .[test]\n\n#before_script:\n#  - travis_wait 40 . ./pyroSAR/install/download_testdata.sh\n\nbefore_script:\n  - psql -U $PGUSER -c 'create database travis_ci_test'\n  - psql -U $PGUSER -c \"create extension if not exists postgis\"\n  - psql -U $PGUSER -c \"alter user ${PGUSER} password '${PGPASSWORD}'\"\n\nscript:\n  - coverage run -m pytest\n\nafter_success:\n  - coveralls\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing to pyroSAR\n\nFirst off, thanks for considering a contribution to pyroSAR. Any contribution, may it be a feature suggestion, a pull \nrequest or s simple bug report, is valuable to the project and very welcome.\nThis document is intended as a guideline on best practices.\n\n## How to open an issue\nThe easiest way to contribute to pyroSAR is by opening an issue. This is intended for reporting software bugs and \nsuggesting new features. Before you do, please read through the list of \n[open issues](https://github.com/johntruckenbrodt/pyroSAR/issues) to see whether this issue has already been raised.\nThis way, duplicates can be reduced and it is easier for the developers to address them.\nIf you are not sure whether your issue is a duplicate of an existing one, just open a new issue. It is easier to link \ntwo existing similar issues than separating two different ones contained in one.\nFor reporting bugs please fill out the template, which is available once you open it. For suggesting new features you\ncan just delete the template text.  \nThe following questions need to be answered so that is is possible for the developers to start fixing the software:\n- which operating system are you using?  \ne.g. Windows 10, Ubuntu 18.4, etc.\n- which environment is pyroSAR running in?  \ne.g. system-wide Python installation, Anaconda environment, virtual environment, etc.\n- which version of pyroSAR are you using?  \none installed via pip or a clone of the GitHub repository?\n-  which function of pyroSAR did you call with which parameters?  \n- if applicable, which version of SNAP or GAMMA are you using in pyroSAR?\n- the full error message\n\nThis way the error is reproducible and can quickly be fixed.\n\n## Checking pyroSAR's version\nThe used version can be obtained like this:\n```python\nimport pyroSAR\nprint(pyroSAR.__version__)\n```\nDepending on how you installed pyroSAR the version might look differently. 
\nIf installed via pip with `pip install pyroSAR`, the package is downloaded from \n[PyPI](https://pypi.org/project/pyroSAR/), \nwhere only the main releases are stored and versions are named e.g. `0.9.1`. \nThese can also be found on GitHub [here](https://github.com/johntruckenbrodt/pyroSAR/releases).\nIf you have installed pyroSAR directly from GitHub like so:\n```shell script\npython3 -m pip install git+https://github.com/johntruckenbrodt/pyroSAR\n```\nor have directly cloned a branch from GitHub, your version might look like this:\n`0.9.2.dev103+g57eeb30`, in which this naming pattern is used:  \n`{next_version}.dev{distance}+{scm letter}{revision hash}`.\nIn this case we can see that git is used as scm and the latest commit of the software was \n[57eeb30](https://github.com/johntruckenbrodt/pyroSAR/commit/57eeb30970dc6adfee62ca12fd8c8818ecaf3a14), \nwhich, at the time of checking the version, had a distance of 103 commits to the latest commit.\nSee [here](https://www.diycode.cc/projects/pypa/setuptools_scm) for more details.\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "# Copyright (c) 2014-2026, the pyroSAR Developers.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated\ndocumentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the\nrights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit\npersons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\nWARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\nOTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "recursive-include pyroSAR/snap *.xml\nrecursive-include pyroSAR/snap/data *\nrecursive-include pyroSAR/ERS/data *\nrecursive-exclude tests *\nrecursive-exclude .github *\nexclude .travis.yml appveyor.yml\n"
  },
  {
    "path": "README.md",
    "content": "<h1 align=\"center\">\n  <br>\n  <a>pyroSAR</a>\n</h1>\n<h3 align=\"center\">A Python Framework for Large-Scale SAR Satellite Data Processing</h3>\n\n<p align=\"center\">\n  <a href='https://github.com/johntruckenbrodt/pyroSAR/actions/workflows/conda-install.yml'>\n    <img src='https://github.com/johntruckenbrodt/pyroSAR/actions/workflows/conda-install.yml/badge.svg' alt='Conda Build Status'></a>\n  <a href='https://coveralls.io/github/johntruckenbrodt/pyroSAR?branch=main'>\n    <img src='https://coveralls.io/repos/github/johntruckenbrodt/pyroSAR/badge.svg?branch=main' alt='Coveralls Status' /></a>\n  <a href='https://pyrosar.readthedocs.io/en/latest/?badge=latest'>\n    <img src='https://readthedocs.org/projects/pyrosar/badge/?version=latest' alt='Documentation Status' /></a>\n  <a href='https://badge.fury.io/py/pyroSAR'>\n    <img src='https://badge.fury.io/py/pyroSAR.svg' alt='PyPI Status' /></a>\n  <a href='https://anaconda.org/conda-forge/pyrosar'>\n    <img src='https://img.shields.io/conda/vn/conda-forge/pyrosar.svg' alt='Conda-Forge Status' /></a>\n</p>\n\nThe pyroSAR package aims at providing a complete solution for the scalable organization and processing of SAR satellite data:\n* Reading of data from various past and present satellite missions\n* Handling of acquisition metadata\n* User-friendly access to processing utilities in [SNAP](https://step.esa.int/main/toolboxes/snap/) \nand [GAMMA Remote Sensing](https://www.gamma-rs.ch/) software\n* Formatting of the preprocessed data for further analysis\n* Export to Data Cube solutions\n\nHead on over to [readthedocs](https://pyrosar.readthedocs.io/en/latest/?badge=latest) for installation instructions,\nexamples and API reference.\n"
  },
  {
    "path": "appveyor.yml",
    "content": "# thanks a lot to the Nansat project (https://github.com/nansencenter/nansat) from which this file was adapted\nenvironment:\n  matrix:\n    - TARGET_ARCH: x64\n      CONDA_PY: 36\n      CONDA_INSTALL_LOCN: C:\\Miniconda3-x64\n      GDAL_DATA: C:\\Miniconda3-x64\\Library\\share\\gdal\n      PROJECT_DIR: C:\\projects\\pyrosar\n      SNAP_INSTALL: C:\\projects\\snap\n      PGUSER: postgres\n      PGPASSWORD: Password12!\n      SNAP_VERSION: 10\n      SNAP_INSTALLER: esa-snap_sentinel_windows-%SNAP_VERSION%.0.0.exe\n\nplatform:\n  - x64\n\nservices:\n  - postgresql96\n\ninstall:\n  # Cygwin's git breaks conda-build. (See https://github.com/conda-forge/conda-smithy-feedstock/pull/2.)\n  - rmdir C:\\cygwin /s /q\n\n  # install PostGIS\n  - appveyor DownloadFile  https://download.osgeo.org/postgis/windows/pg96/archive/postgis-bundle-pg96-3.2.0x64.zip\n  - 7z x .\\postgis-bundle-pg96-3.2.0x64.zip\n  - xcopy /e /y /q .\\postgis-bundle-pg96-3.2.0x64 C:\\Progra~1\\PostgreSQL\\9.6\n\n  # activate conda\n  - call %CONDA_INSTALL_LOCN%\\Scripts\\activate.bat\n\n  # If there is a newer build queued for the same PR, cancel this one.\n  - appveyor DownloadFile https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py\n  - python ff_ci_pr_build.py -v --ci \"appveyor\" \"%APPVEYOR_ACCOUNT_NAME%/%APPVEYOR_PROJECT_SLUG%\" \"%APPVEYOR_BUILD_NUMBER%\" \"%APPVEYOR_PULL_REQUEST_NUMBER%\"\n  - del ff_ci_pr_build.py\n\n  # update conda\n  - conda update --yes --quiet conda\n\n  - set PYTHONUNBUFFERED=1\n\n\n  # Add our channels.\n  - conda config --set show_channel_urls true\n  - conda config --remove channels defaults\n  - conda config --add channels defaults\n  - conda config --add channels conda-forge\n\n  # install ESA SNAP\n  - appveyor DownloadFile https://download.esa.int/step/snap/%SNAP_VERSION%_0/installers/%SNAP_INSTALLER%\n  - start %SNAP_INSTALLER% -q -dir %SNAP_INSTALL%\n\n  - set 
PATH=%PATH%;%SNAP_INSTALL%\\bin\n\n  - echo %PATH%\n\n  # Configure the VM.\n  - conda env create --file environment-dev.yml\n  - conda activate ps_test_dev\n  - pip install .\n\n# Skip .NET project specific build phase.\nbuild: false\n\ntest_script:\n  - coverage run -m pytest\n"
  },
  {
    "path": "datacube_prepare.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"This is a quick notebook to demonstrate the pyroSAR functionality for importing processed SAR scenes into an Open Data Cube\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"from pyroSAR.datacube_util import Product, Dataset\\n\",\n    \"from pyroSAR.ancillary import groupby, find_datasets\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# define a directory containing processed SAR scenes\\n\",\n    \"dir = '/path/to/some/data'\\n\",\n    \"\\n\",\n    \"# define a name for the product YML; this is used for creating a new product in the datacube\\n\",\n    \"yml_product = './product_def.yml'\\n\",\n    \"\\n\",\n    \"# define a directory for storing the indexing YMLs; these are used to index the dataset in the datacube\\n\",\n    \"yml_index_outdir = './yml_indexing'\\n\",\n    \"\\n\",\n    \"# define a name for the ingestion YML; this is used to ingest the indexed datasets into the datacube\\n\",\n    \"yml_ingest = './ingestion.yml'\\n\",\n    \"\\n\",\n    \"# product description\\n\",\n    \"product_name_indexed = 'S1_GRD_index'\\n\",\n    \"product_name_ingested = 'S1_GRD_ingest'\\n\",\n    \"product_type = 'gamma0'\\n\",\n    \"description = 'this is just some test'\\n\",\n    \"\\n\",\n    \"# define the units of the dataset measurements (i.e. 
polarizations)\\n\",\n    \"units = 'backscatter'\\n\",\n    \"# alternatively this could be a dictionary:\\n\",\n    \"# units = {'VV': 'backscatter VV', 'VH': 'backscatter VH'}\\n\",\n    \"\\n\",\n    \"ingest_location = './ingest'\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# find pyroSAR files by metadata attributes\\n\",\n    \"files = find_datasets(dir, recursive=True, sensor=('S1A', 'S1B'), acquisition_mode='IW')\\n\",\n    \"\\n\",\n    \"# group the found files by their file basenames\\n\",\n    \"# files with the same basename are considered to belong to the same dataset\\n\",\n    \"grouped = groupby(files, 'outname_base')\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"print(len(files))\\n\",\n    \"print(len(grouped))\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"In the next step we create a new product, add the grouped datasets to it and create YML files for indexing the datasets in the cube.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"# create a new product and add the collected datasets to it\\n\",\n    \"# alternatively, an existing product can be used by providing the corresponding product YML file\\n\",\n    \"with Product(name=product_name_indexed,\\n\",\n    \"             product_type=product_type,\\n\",\n    \"             description=description) as prod:\\n\",\n    \"\\n\",\n    \"    for dataset in grouped:\\n\",\n    \"        with Dataset(dataset, units=units) as ds:\\n\",\n    \"\\n\",\n    \"            # add the datasets to the product\\n\",\n    \"            # this will generalize the metadata from those datasets to measurement descriptions,\\n\",\n    \"            # which define 
the product definition\\n\",\n    \"            prod.add(ds)\\n\",\n    \"\\n\",\n    \"            # parse datacube indexing YMLs from product and dataset metadata\\n\",\n    \"            prod.export_indexing_yml(ds, yml_index_outdir)\\n\",\n    \"\\n\",\n    \"    # write the product YML\\n\",\n    \"    prod.write(yml_product)\\n\",\n    \"    \\n\",\n    \"    # print the product metadata, which is written to the product YML\\n\",\n    \"    print(prod)\"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"Now that we have a YML file for creating a new product and individual YML files for indexing the datasets, we can create a last YML file, which will ingest the indexed datasets into the cube. For this a new product is created and the files are converted to NetCDF, which are optimised for useage in the cube. The location of those NetCDF files also needs to be defined.\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n    \"with Product(yml_product) as prod:\\n\",\n    \"    prod.export_ingestion_yml(yml_ingest, product_name_ingested, ingest_location, \\n\",\n    \"                              chunking={'x': 512, 'y': 512, 'time': 1})\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"cubeenv\",\n   \"language\": \"python\",\n   \"name\": \"cubeenv\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 3\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython3\",\n   \"version\": \"3.6.6\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 2\n}\n"
  },
  {
    "path": "docs/Makefile",
    "content": "# Minimal makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line, and also\n# from the environment for the first two.\nSPHINXOPTS    ?=\nSPHINXBUILD   ?= sphinx-build\nSOURCEDIR     = source\nBUILDDIR      = build\n\n# Put it first so that \"make\" without argument is like \"make help\".\nhelp:\n\t@$(SPHINXBUILD) -M help \"$(SOURCEDIR)\" \"$(BUILDDIR)\" $(SPHINXOPTS) $(O)\n\n.PHONY: help Makefile\n\n# Catch-all target: route all unknown targets to Sphinx using the new\n# \"make mode\" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).\n%: Makefile\n\t@$(SPHINXBUILD) -M $@ \"$(SOURCEDIR)\" \"$(BUILDDIR)\" $(SPHINXOPTS) $(O)\n"
  },
  {
    "path": "docs/make.bat",
    "content": "@ECHO OFF\n\npushd %~dp0\n\nREM Command file for Sphinx documentation\n\nif \"%SPHINXBUILD%\" == \"\" (\n\tset SPHINXBUILD=sphinx-build\n)\nset SOURCEDIR=source\nset BUILDDIR=build\n\n%SPHINXBUILD% >NUL 2>NUL\nif errorlevel 9009 (\n\techo.\n\techo.The 'sphinx-build' command was not found. Make sure you have Sphinx\n\techo.installed, then set the SPHINXBUILD environment variable to point\n\techo.to the full path of the 'sphinx-build' executable. Alternatively you\n\techo.may add the Sphinx directory to PATH.\n\techo.\n\techo.If you don't have Sphinx installed, grab it from\n\techo.https://www.sphinx-doc.org/\n\texit /b 1\n)\n\nif \"%1\" == \"\" goto help\n\n%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%\ngoto end\n\n:help\n%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%\n\n:end\npopd\n"
  },
  {
    "path": "docs/source/about/changelog.rst",
    "content": "#########\nChangelog\n#########\n\n0.6 | 2018-11-20\n================\n\nSAR metadata\n------------\n- new standardized  metadata fields `orbitNumber_abs`, `orbitNumber_rel`, `cycleNumber` and `frameNumber` for all SAR\n  formats\n- customization of output file names with additional metadata fields (e.g. orbit numbers)\n\nsoftware configuration\n----------------------\n- pyroSAR configuration file handling: the paths to the SNAP and Gamma installation as well as relevant metadata\n  directories are now registered in a configuration file `config.ini`, which is stored in a directory `.pyrosar` in the\n  user home directory\n- improved SNAP installation verification: pyroSAR now performs a deeper check of the SNAP installation to make sure\n  it is not mistaken with e.g. the Ubuntu package manager snap; relevant installation executables and directories are\n  stored in the configuration file\n\ngeneral functionality\n---------------------\n- deeper integration of package `spatialist <https://github.com/johntruckenbrodt/spatialist>`_: all the spatial file\n  handling functionality that was part of pyroSAR is now part of package spatialist; now all the functionality is imported\n  from spatialist and removed from pyroSAR\n- improved search for datasets processed by pyroSAR: new helper functions exist, which make it easier to search for\n  datasets by metadata fields, which are internally searched for in the respective file names\n- introduced gamma function parser: these new tools search for a GAMMA_HOME environment variable and, if found, parse\n  Python functions from the docstring of respective command line tools; for this, new Python scripts are created, which\n  are stored alongside the configuration file in the user home directory; this way users can easily use Python functions\n  with named parameters instead of the positional arguments of the Gamma command line tools\n- improved documentation\n\nOpen Data Cube 
Export\n---------------------\nfunctionality to export processed datasets directly to an Open Data Cube:\nit is now possible to create Open Data Cube product YML files as well as YML files for data indexing and ingestion\ninto this product; pyroSAR also internally checks for compatibility of a particular dataset with the target product;\nthis way, the resulting files can easily be passed to the Open Data Cube command line tools\nseveral bug fixes\n\nSNAP API\n--------\nimproved SNAP processing workflow node linking: it is now possible to add a node also before an existing one, instead\nof just after it\n\nPython package integrity\n------------------------\n- add trove classifiers for supported operating systems and MIT license for easier online search\n- exchange http with https for all URLs that support it\n\n0.7 | 2019-01-03\n================\n\nseveral changes to the functioning of the Gamma command API\n\nGAMMA API\n---------\n\nprocessing\n++++++++++\n- :func:`pyroSAR.gamma.geocode`:\n\n  * optionally write all Gamma commands to shellscript\n  * newly introduced choice of normalization method\n  * changed normalization default approach\n\n- :func:`pyroSAR.gamma.process`:\n\n  * new parameter `logfile` to specify a logfile instead of just a directory with automated file naming\n  * new parameter `shellscript` to write the executed command to a shell script protocol\n\ncommand parser\n++++++++++++++\n- add parameters `outdir` and `shellscript` to parsed functions\n- extensive improvement to accurately parse more commands\n- add parameter `inlist` to some commands, which require interactive input via `stdin`\n\ngeneral\n+++++++\n- several bug fixes\n- extended documentation\n- make use of parsed command functions internally\n- enable passing `logpath`, `outdir` and `shellscript` to all parsed functions via additional parameters for other\n  convenience functions\n\n0.8 | 2019-02-11\n================\n\nAuxiliary Data Handling\n-----------------------\n\n- new 
module auxdata with function :func:`pyroSAR.auxdata.dem_autoload` to automatically download tiles of\n  different DEM types overlapping with given geometries\n- class :class:`pyroSAR.S1.OSV`: reduced search time for new RES orbit state vector files;\n  included more meaningful status messages\n\nGAMMA API\n---------\n\n- new function :func:`pyroSAR.gamma.srtm.dem_autocreate` to automatically create DEMs in Gamma format from the output\n  of function :func:`pyroSAR.auxdata.dem_autoload`\n- improved writing of ENVI HDR files from class :class:`pyroSAR.gamma.ISPPar`\n- class :class:`pyroSAR.gamma.UTM`: improved to work with newer Gamma versions\n- function :func:`pyroSAR.gamma.geocode`:\n\n  + improved documentation\n  + clarified code for better readability\n  + more consistent naming scheme for all temporarily written files\n  + export temporarily written files (e.g. local incidence angle) via new parameter `export_extra`\n  + additional parametrization tests to ensure best processing result\n  + changed default of parameter `func_interp` to 2 to work best with default of parameter `normalization_method`\n    (see documentation of Gamma command pixel_area)\n\nSNAP API\n--------\n\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + export temporarily written files (e.g. local incidence angle) via new parameter `export_extra`\n\n0.9 | 2019-06-15\n================\n\nDrivers\n-------\n\n- :class:`pyroSAR.drivers.SAFE`: read heading angle, incident angle and image geometry (e.g. 
Ground Range) from metadata\n- :class:`pyroSAR.drivers.Archive`: improved cross-compatibility with Python2 and Python3\n\n\nSNAP API\n--------\n\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + option to export `DEM` via parameter `export_extra`\n  + added Sentinel-1 `ThermalNoiseRemoval` node via new parameter `removeS1ThermalNoise`\n  + added `Multilook` node which is executed to approximate the target resolution if necessary\n    (currently only for Sentinel-1 since metadata entries `incidence` and `image_geometry` are required)\n  + new parameter `groupsize` to split workflows into several groups, which are executed separately with\n    intermediate products written to disk. This increases processing speed\n  + simplified internal node parametrization for easier use in future functions\n  + fail if no POE orbit state vector file is found\n  + `Terrain-Flattening`:\n\n    * added additional parameters `additionalOverlap` and `oversamplingMultiple`\n    * use bilinear instead of bicubic interpolation\n  + `Remove-GRD-Border-Noise`: decrease `borderLimit` from 1000 to 500 (SNAP default)\n  + new parameter `gpt_exceptions` to execute workflows containing specific nodes with different GPT versions than\n    the default one\n  + automatically remove node parameters on GPT fail and re-run the modified workflow; this is relevant if a node is\n    executed in an older GPT version (e.g. via parameter `gpt_exceptions`), which does not accept parameters which were\n    introduced in later GPT versions (e.g. 
those described above for node `Terrain-Flattening`)\n  + disable/enable terrain flattening via new parameter `terrainFlattening`\n  + optionally return workflow filename with new parameter `returnWF`\n  + execute custom pyroSAR S1 GRD border noise removal (see :func:`pyroSAR.S1.removeGRDBorderNoise`)\n  + new parameters `demResamplingMethod` and `imgResamplingMethod`\n\nGAMMA API\n---------\n\n- SRTM Tools renamed to DEM Tools\n\n  + function :func:`pyroSAR.gamma.dem.dem_autocreate`:\n\n    * define arbitrary output CRS and resolution via new parameters `t_srs` and `tr`\n    * optionally perform geoid to ellipsoid conversion in either GDAL or GAMMA via new parameter `geoid_mode`\n\n- function :func:`pyroSAR.gamma.geocode`:\n\n  + removed multiplication of backscatter with cosine of incident angle via command `lin_comb`\n  + fixed bug in writing correct nodata values to ancillary products defined via parameter `export_extra`\n  + changed default of parameter `func_geoback` from 2 to 1 (GAMMA default)\n\n- function :func:`pyroSAR.gamma.correctOSV`:\n\n  + fixed bug in using the first OSV file in a directory for correcting an image, which resulted in S1B files being\n    corrected with S1A OSV files. This occasionally resulted in errors of no DEM overlap while processing S1B scenes\n\n- fixed bug in treating GAMMA image pixel coordinates as top left instead of pixel center. 
This is relevant for writing\n  ENVI HDR files for GAMMA images via function :func:`pyroSAR.gamma.par2hdr` resulting in the image to be shifted\n  by 1/2 pixel to Southeast\n\nCommand Parser\n++++++++++++++\n- compatibility with GAMMA version released in November 2018\n- delete parsed modules if environment variable `GAMMA_HOME` was reset causing them to be re-parsed with the new version\n  on module import\n\ngeneral functionality\n---------------------\n\n- new function :func:`pyroSAR.ancillary.multilook_factors` to compute factors depending on image geometry and target resolution\n- :func:`pyroSAR.S1.removeGRDBorderNoise`: reached Python3 compatibility\n\nAuxiliary Data Handling\n-----------------------\n\n- new function :func:`pyroSAR.auxdata.dem_create` for convenient creation of DEM mosaics as downloaded by\n  :func:`pyroSAR.auxdata.dem_autoload`\n\n- function :func:`pyroSAR.auxdata.dem_autoload`: download 1 degree tiles instead of 5 degree tiles\n\n- class :class:`pyroSAR.S1.OSV`:\n\n  + download files specific to the Sentinel-1 sensor (S1A/S1B) instead of all matching the acquisition time\n  + improved time span search, which occasionally resulted in missing OSV files\n\n0.9.1 | 2019-07-05\n==================\n\nAuxiliary Data Handling\n-----------------------\n\n- function :func:`pyroSAR.auxdata.dem_create`: new parameter `resampling_method`\n\nGAMMA API\n---------\n\n- function :func:`pyroSAR.gamma.dem.dem_autocreate`: new parameter `resampling_method`\n\nSNAP API\n--------\n\n- function :func:`pyroSAR.snap.util.geocode`: fixed typo of parameter `removeS1BorderNoise`\n\n0.10 | 2019-12-06\n=================\n\nDrivers\n-------\n\n- method :meth:`~pyroSAR.drivers.ID.bbox`: choose the output vector file format via new parameter `driver` or by\n  using one of spatialist's supported file name extensions (see :meth:`spatialist.vector.Vector.write`)\n\n- :class:`pyroSAR.drivers.SAFE`\n\n  + new method :meth:`~pyroSAR.drivers.SAFE.quicklook` for writing KMZ 
quicklooks\n  + method :meth:`~pyroSAR.drivers.SAFE.getOSV`: renamed parameter `outdir` to `osvdir`\n\n- :class:`pyroSAR.drivers.Archive`: remove scenes from the database if they cannot be found at their file location.\n  This is performed at each initialization of an `Archive` object.\n\nGAMMA API\n---------\n\n- new parameter `basename_extensions` for adding extra metadata fields to output image names; affects:\n\n  + :func:`pyroSAR.gamma.convert2gamma`\n  + :func:`pyroSAR.gamma.geocode`\n\n- :func:`pyroSAR.gamma.correctOSV`: make use of OSV files in SNAP's auxdata structure\n- :func:`pyroSAR.gamma.geocode`: made border noise removal optional with new parameter `removeS1BorderNoise`\n\nSNAP API\n--------\n- workflow parsing\n\n  + improved output XML for better display in SNAP GUI\n  + support for nodes with multiple input scenes, e.g. `SliceAssembly`\n\n- SAR processor (function :func:`~pyroSAR.snap.auxil.gpt`)\n\n  + write Sentinel-1 manifest.safe with processing results\n  + two methods for border noise removal: `ESA` and `pyroSAR` via new parameter `removeS1BorderNoiseMethod`\n\n- function :func:`pyroSAR.snap.util.geocode`\n\n  + optional speckle filtering with new parameter `speckleFilter`\n  + choose the output backscatter reference area (`beta0`/`gamma0`/`sigma0`) with new parameter `refarea`\n  + default of parameter `groupsize` changed to 1\n  + internally download S1 OSV files\n  + internally download SNAP's `EGM96` geoid to `WGS84` ellipsoid DEM conversion lookup table via new function\n    :func:`pyroSAR.snap.auxil.get_egm96_lookup`\n  + support for multi-scene `SliceAssembly`; can be invoked by passing a list of scenes to parameter `infile`\n  + new parameter `removeS1BorderNoiseMethod`\n  + new parameter `gpt_args` to pass additional arguments to the GPT call\n\nDatacube Tools\n--------------\n\n- :meth:`pyroSAR.datacube_util.Product.export_ingestion_yml`: new parameter `chunking`\n\nAuxiliary Data Handling\n-----------------------\n\n- OSV download 
functionality (class :class:`pyroSAR.S1.OSV`)\n\n  + made definition of OSV download directory optional; default is SNAP's auxdata directory\n  + organization of downloaded files into SNAP's auxdata structure:\n\n    * compression to zip\n    * sort files into subdirs for sensor, year, month\n\n  + removed method :meth:`~pyroSAR.S1.OSV.update`\n\nAncillary Tools\n---------------\n- :func:`pyroSAR.ancillary.parse_datasetname`\n\n  + support for datasets in NetCDF format\n  + enable parsing of ancillary products like local incidence angle (\\*inc_geo.tif)\n\n- :func:`pyroSAR.ancillary.find_datasets`:  new parameters `start` and `stop` for time filtering\n\ngeneral\n-------\n- bug fixes and documentation improvements\n\n0.10.1 | 2019-12-12\n===================\n\nGAMMA API\n---------\n\n- :ref:`Command API <gamma-command-api>` compatibility with GAMMA version 20191203\n\n0.11 | 2020-05-29\n=================\n\nDrivers\n-------\n\n- :class:`pyroSAR.drivers.Archive`: completely restructured to use the `SQLAlchemy <https://www.sqlalchemy.org/>`_\n  Object Relational Mapper (ORM). This makes it possible to switch between SQLite+Spatialite and PostgreSQL+PostGIS\n  database backends.\n\n- :meth:`pyroSAR.drivers.SAFE.getOSV`: new argument `returnMatch` to also return the name of an OSV file instead of just\n  downloading it.\n\nSNAP API\n--------\n\n- arbitrary nodes can now be parsed. Before, only a small selection of nodes (those used by function\n  :func:`~pyroSAR.snap.util.geocode`) were available. Now, any node and its default parametrization can be parsed to XML\n  from the GPT documentation by internally calling e.g.:\n\n    ::\n\n        gpt Terrain-Flattening -h\n\n  The parsed XML representation is saved for faster future reuse. See function :func:`~pyroSAR.snap.auxil.parse_node`\n  for details. In all cases the standard SNAP file suffix is used for output products, e.g. 
`_TF` for\n  `Terrain-Flattening`.\n\n- multi-source nodes like `SliceAssembly` now take any number of sources, not just two.\n  See class :class:`~pyroSAR.snap.auxil.Node`.\n\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + new argument `nodataValueAtSea` to decide whether sea areas are masked out.\n    Depends on the quality of the sea mask in the input DEM.\n  + automatically download required Sentinel-1 Orbit State Vector (OSV) files.\n  + new argument `allow_RES_OSV` to decide whether to allow usage of the less accurate Sentinel-1 RES OSV files in\n    case the POE file is not available yet.\n  + new argument `demName` to choose the type of the auto-downloaded DEM.\n\nAuxiliary Data Handling\n-----------------------\n\n- class :class:`pyroSAR.S1.OSV`:\n\n  + removed progressbar from method :meth:`~pyroSAR.S1.OSV.catch` and made it optional in method\n    :meth:`~pyroSAR.S1.OSV.retrieve` with new argument `pbar`\n\ngeneral\n-------\n- bug fixes, new automated tests, documentation improvements\n\n0.11.1 | 2020-07-17\n===================\n\n- bug fixes\n\nGAMMA API\n---------\n\n- :ref:`Command API <gamma-command-api>` compatibility with GAMMA version 20200713\n\n0.12 | 2021-02-19\n=================\n\nDrivers\n-------\n\n- :class:`pyroSAR.drivers.Archive`:\n\n  + new argument `cleanup` to automatically remove missing scenes from database on initialization\n  + method :meth:`~pyroSAR.drivers.Archive.insert`: improved insertion speed\n  + method :meth:`~pyroSAR.drivers.Archive.select_duplicates`: new argument `value`\n  + method :meth:`~pyroSAR.drivers.Archive.get_colnames`: new argument `table` to get column names from arbitrary\n    tables, not just the main `data` table\n  + method :meth:`~pyroSAR.drivers.Archive.drop_element`: option to remove scene from `data` and `duplicates` tables\n    simultaneously by removing argument `table` and adding argument `with_duplicates`\n  + method :meth:`~pyroSAR.drivers.Archive.drop_table`:\n\n    * new argument 
`verbose`\n    * remove arbitrary tables, not just `data` and `duplicates`\n\n  + method :meth:`~pyroSAR.drivers.Archive.drop_database`: replaced by new function :func:`pyroSAR.drivers.drop_archive`\n  + new method :meth:`~pyroSAR.drivers.Archive.add_tables` to add custom tables to a database\n  + bug fixes\n\n- :class:`pyroSAR.drivers.CEOS_PSR`:\n\n  + added support for ALOS-1 PALSAR\n  + added basic support for Level 1.0 data\n\n- :class:`pyroSAR.drivers.SAFE`:\n\n  + method :meth:`~pyroSAR.drivers.SAFE.getOSV`: new argument `useLocal` to not search online if local matching\n    files are found\n\nGAMMA API\n---------\n\n- :ref:`Command API <gamma-command-api>` compatibility with GAMMA version 20201216\n\n- function :func:`pyroSAR.gamma.convert2gamma`:\n\n  + renamed argument `S1_noiseremoval` to `S1_tnr` (thermal noise removal)\n  + new argument `S1_bnr` (border noise removal)\n\n- function :func:`pyroSAR.gamma.geocode`:\n\n  + new default ``removeS1BorderNoiseMethod='gamma'``\n  + renamed argument `tempdir` to `tmpdir`\n\nSNAP API\n--------\n\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + enable grid alignment with new arguments `alignToStandardGrid`, `standardGridOriginX` and `standardGridOriginY`\n  + new argument `tmpdir` to choose the location of temporarily created files\n  + bug fixes\n\n- function :func:`pyroSAR.snap.auxil.gpt`:\n\n  + perform custom pyroSAR S1 GRD border noise removal only if IPF<2.9\n\nAuxiliary Data Handling\n-----------------------\n\n- function :func:`pyroSAR.auxdata.dem_autoload`: return `None` if a VRT was defined\n\n0.12.1 | 2021-03-09\n===================\n\nSNAP API\n--------\n\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + output both sigma0 and gamma0 via argument `refarea`\n  + new `export_extra` option 'layoverShadowMask'\n\n- numerous bug fixes and API improvements\n\nAuxiliary Data Handling\n-----------------------\n\n- class :class:`pyroSAR.S1.OSV`:\n\n  + download files from 
https://scihub.copernicus.eu/gnss\n\n0.13 | 2021-09-10\n=================\n\nDrivers\n-------\n\n- new class :class:`pyroSAR.drivers.EORC_PSR`\n- new argument `exist_ok` for ID object unpack methods to enable reuse of already unpacked scenes\n- :meth:`pyroSAR.drivers.SAFE.getOSV`: new argument `url_option` to choose between different download URLs\n- :class:`pyroSAR.drivers.SAFE` align coordinate sorting of attribute `meta['coordinates']` with CRS description\n- :func:`pyroSAR.drivers.identify_many`: disable progressbar by default\n\nGAMMA API\n---------\n\n- adaptations to enable processing of :class:`~pyroSAR.drivers.EORC_PSR` data:\n\n  + :func:`pyroSAR.gamma.calibrate`\n  + :func:`pyroSAR.gamma.convert2gamma`\n  + :func:`pyroSAR.gamma.geocode`\n\n- :func:`pyroSAR.gamma.geocode`:\n\n  + experimental optional refinement of the geocoding lookup table with new argument `refine_lut`\n  + removed arguments `normalization_method`, `func_interp`, `removeS1BorderNoise`, `sarSimCC`\n  + limit radiometric normalization to RTC correction method\n  + simplify and improve computation of RTC contribution area\n  + file suffices `pan` and `norm` have been replaced with `gamma0-rtc`\n  + argument `export_extra` options:\n\n    * removed `pix_geo`\n    * renamed `pix_fine` to `pix_ratio`\n    * added `pix_area_sigma0`, `pix_area_sigma0_geo`, `pix_area_gamma0_geo`, `gs_ratio` , `gs_ratio_geo`, `pix_ratio_geo`\n\n  + use a dedicated temporary directory to unpack the scene and write GAMMA files so that they are separated (the GAMMA\n    files used to be written to the unpacked scene's directory)\n  + enable multiple scenes as input so that they can be mosaiced in SAR geometry before geocoding\n\n- :func:`pyroSAR.gamma.correctOSV`: new argument `directory`\n\n- :func:`pyroSAR.gamma.multilook`: new argument `exist_ok`\n\n- :func:`pyroSAR.gamma.convert2gamma`: new argument `exist_ok`\n\n- function :func:`pyroSAR.gamma.dem.dem_autocreate`:\n\n  + do not apply an extent buffer by 
default\n  + allow geometry in arbitrary CRS\n\nSNAP API\n--------\n\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + new `export_extra` option `scatteringArea`\n\n- extended support for `BandMaths` operator\n\nAuxiliary Data Handling\n-----------------------\n\n- method :meth:`pyroSAR.S1.OSV.catch`: new argument `url_option` with two download URLs to choose from\n\n- function :func:`pyroSAR.auxdata.dem_autoload`:\n\n  + added new DEM option `GETASSE30`\n  + align pixels of subsetted VRT with original tiles\n\n- function :func:`pyroSAR.auxdata.dem_create`:\n\n  + new argument `outputBounds`\n\ngeneral\n-------\n- replaced print messages with logging. This made the `verbose` argument that was used by several functions and\n  methods obsolete; affects the following:\n\n  + :func:`pyroSAR.drivers.identify_many`: replaced by argument `pbar`\n  + :meth:`pyroSAR.drivers.Archive.add_tables`: removed\n  + :meth:`pyroSAR.drivers.Archive.drop_table`: removed\n  + :meth:`pyroSAR.drivers.Archive.insert`: replaced by argument `pbar`\n  + :meth:`pyroSAR.drivers.Archive.import_outdated`: removed\n  + :meth:`pyroSAR.drivers.Archive.move`: replaced by argument `pbar`\n  + :meth:`pyroSAR.drivers.Archive.select`: removed\n  + :func:`pyroSAR.snap.auxil.execute`: removed\n\n  See section :doc:`Logging </general/logging>` for details.\n\n0.14.0 | 2021-10-12\n===================\n\nDrivers\n-------\n- raise more appropriate errors (`c430c59 <https://github.com/johntruckenbrodt/pyroSAR/commit/c430c59289016b5fe2e0f3044225dc5166c39e80>`_)\n\n- :func:`pyroSAR.drivers.findfiles`: removed (functionality contained in :meth:`pyroSAR.drivers.ID.findfiles`,\n  now making use of :func:`spatialist.ancillary.finder`)\n\n- :meth:`pyroSAR.drivers.Archive.select`:\n\n  + show progressbar for scene identification if ``pbar=True``\n  + enabled input of :obj:`~datetime.datetime` objects for arguments ``mindate`` and ``maxdate``\n\n- :func:`pyroSAR.drivers.identify_many`: issue a warning when a file 
cannot be accessed\n  (instead of raising a :obj:`PermissionError`)\n\nGAMMA API\n---------\n- :func:`pyroSAR.gamma.dem.dem_autocreate`: support for new DEM options provided by :func:`pyroSAR.auxdata.dem_autoload`\n\nSNAP API\n--------\n- :func:`pyroSAR.snap.auxil.get_egm96_lookup` removed in favor of new function :func:`pyroSAR.auxdata.get_egm_lookup`\n\nAuxiliary Data Handling\n-----------------------\n- method :meth:`pyroSAR.S1.OSV.retrieve`: thread-safe writing of orbit files\n\n- new function :func:`pyroSAR.auxdata.get_egm_lookup`\n\n- function :func:`pyroSAR.auxdata.dem_create`\n\n  + new geoid option 'EGM2008'\n  + make use of :func:`~pyroSAR.auxdata.get_egm_lookup` for auto-download of EGM lookup files\n  + several bug fixes related to vertical CRS transformation\n  + bug fix for target pixel alignment\n\n- function :func:`pyroSAR.auxdata.dem_autoload`: new DEM options:\n\n  + 'Copernicus 10m EEA DEM'\n  + 'Copernicus 30m Global DEM'\n  + 'Copernicus 90m Global DEM'\n\ngeneral\n-------\n- replaced http URLs with https where applicable\n- improved documentation\n\n0.15.0 | 2022-01-04\n===================\n\nDrivers\n-------\n- :meth:`pyroSAR.drivers.ID.geometry`: new method\n\nGAMMA API\n---------\n- :ref:`Command API <gamma-command-api>` compatibility with GAMMA version 20211208\n\n- renamed argument `resolution` to `spacing`; affects:\n\n  + :func:`pyroSAR.gamma.geocode`\n  + :func:`pyroSAR.gamma.ovs`\n  + :func:`pyroSAR.gamma.multilook`\n\n- function :func:`pyroSAR.gamma.calibrate`\n\n  + removed argument `replace`\n  + added argument `return_fnames`\n\n- function :func:`pyroSAR.gamma.convert2gamma`\n\n  + added argument `return_fnames`\n\n- function :func:`pyroSAR.gamma.multilook`\n\n  + pass multiple Sentinel-1 sub-swaths to argument `infile` which are then\n    combined into a single MLI using GAMMA command `isp.multi_look_ScanSAR`\n\n- class :class:`pyroSAR.gamma.ISPPar`:\n\n  + new object attribute `filetype` with possible values 'isp' and 
'dem'\n\nSNAP API\n--------\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + enabled SLC processing\n  + enable processing of sigma nought RTC\n  + new `export_extra` argument `gammaSigmaRatio`\n  + simplified workflow by writing layover-shadow mask directly from `Terrain-Correction`\n  + changed processing node sequence:\n\n    * was: Read->ThermalNoiseRemoval->SliceAssembly->Remove-GRD-Border-Noise->Calibration\n    * is:  Read->Remove-GRD-Border-Noise->Calibration->ThermalNoiseRemoval->SliceAssembly\n\n  + new output image naming scheme, e.g.\n\n    * S1A__IW___A_20210914T191350_VV_gamma0-rtc.tif\n    * S1A__IW___A_20210914T191350_VH_sigma0-elp.tif\n\n- function :func:`pyroSAR.snap.auxil.gpt`:\n\n  + removed argument `multisource`\n  + added argument `tmpdir`\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.dem_autoload`:\n\n  + updated version of 'Copernicus 10m EEA DEM' from '2020_1' to '2021_1'\n  + new DEM options:\n\n    * 'Copernicus 30m Global DEM II'\n    * 'Copernicus 90m Global DEM II'\n\ngeneral\n-------\n- compatibility with sqlalchemy>=1.4\n\n0.15.1 | 2022-01-07\n===================\ngeneral\n-------\n- bug fixes\n\n0.16.0 | 2022-03-03\n===================\n\nDrivers\n-------\n- :class:`pyroSAR.drivers.BEAM_DIMAP`: new driver supporting SNAP's BEAM-DIMAP format\n- :class:`pyroSAR.drivers.SAFE`:\n\n  + corrected SLC metadata (was read from first sub-swath, now from center sub-swath or as sum of all sub-swaths):\n    center: spacing, heading, incidence; sum: samples, lines\n  + new property :attr:`pyroSAR.drivers.SAFE.resolution`\n\nAuxiliary Data Handling\n-----------------------\n- create water body mask mosaics from ancillary DEM products. 
Affects the following:\n\n  + function :func:`pyroSAR.auxdata.dem_autoload`: new arguments `nodata` and `hide_nodata`\n\n- function :func:`pyroSAR.auxdata.dem_create`:\n\n  + new arguments `pbar` and `threads`\n\nSNAP API\n--------\n- new method :meth:`pyroSAR.snap.auxil.Par_BandMath.add_equation`\n- new function :func:`pyroSAR.snap.util.noise_power`\n- new function :func:`pyroSAR.snap.auxil.erode_edges`\n- function :func:`pyroSAR.snap.auxil.writer`:\n\n  + new arguments `clean_edges` and `clean_edges_npixels`\n    (to make use of function :func:`~pyroSAR.snap.auxil.erode_edges`)\n  + enabled conversion of BEAM-DIMAP files\n\n- function :func:`pyroSAR.snap.util.geocode`:\n\n  + new arguments `clean_edges` and `clean_edges_npixels` (see function :func:`~pyroSAR.snap.auxil.writer`)\n  + renamed argument `tr` to `spacing`\n  + new arguments `rlks` and `azlks` to manually set the number of looks\n\nGAMMA API\n---------\n- function :func:`pyroSAR.gamma.geocode`:\n\n  + new arguments `rlks` and `azlks`\n\n- function :func:`pyroSAR.gamma.multilook`:\n\n  + new arguments `rlks` and `azlks`\n\ngeneral\n-------\n- correction of multi-look factor computation. Before: approximate target pixel spacing but never exceed it.\n  Now: first best approximate the azimuth spacing as close as possible (even if this means exceeding the target spacing)\n  and then choose the range looks to approximate a square pixel as close as possible. 
API changes:\n\n  + function :func:`pyroSAR.ancillary.multilook_factors`:\n\n    * renamed argument `sp_rg` to `source_rg`\n    * renamed argument `sp_az` to `source_az`\n    * replaced arguments `tr_rg` and `tr_az` with unified `target`\n\n0.16.1 | 2022-03-07\n===================\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.get_egm_lookup`:\n\n  + changed URL for PROJ geoid models, which results in better performance for\n    function :func:`pyroSAR.auxdata.dem_create`\n    (See `pyroSAR#200 <https://github.com/johntruckenbrodt/pyroSAR/issues/200>`_).\n\n0.16.2 | 2022-03-14\n===================\n\nSNAP API\n--------\n- function :func:`pyroSAR.snap.util.noise_power`: added missing orbit state vector refinement\n\n0.16.3 | 2022-03-23\n===================\n\nSNAP API\n--------\n- function :func:`pyroSAR.snap.util.noise_power`: pass argument `cleanup` to :func:`~pyroSAR.snap.auxil.gpt` call\n- function :func:`~pyroSAR.snap.auxil.gpt`: shortened names of temporary directories\n- function :func:`~pyroSAR.snap.auxil.erode_edges`: fixed bug in polygon selection\n- function :func:`~pyroSAR.snap.auxil.writer`: do not erode edges of layover-shadow mask\n\n0.17.0 | 2022-05-30\n===================\n\nSNAP API\n--------\n\n- function :func:`pyroSAR.snap.erode_edges`: reuse mask for all images\n\nGAMMA API\n---------\n\n- new function :func:`pyroSAR.gamma.dem.dem_import`\n\n- function :func:`pyroSAR.gamma.geocode`:\n\n  + new argument `update_osv`\n\ngeneral\n-------\n\n- full support for Sentinel-1 stripmap mode; renamed `SM` naming pattern to `S1..S6` to differentiate different beams\n- bug fixes\n\n0.17.2 | 2022-06-23\n===================\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.dem_create`:\n\n  + use maximum possible value of `dtype` (e.g. 
255 for uint8) instead of -32767.0 if the nodata value cannot be read from the source file\n  + always use the same value for source and destination nodata\n\n0.17.3 | 2022-07-03\n===================\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.dem_create`:\n\n  + In case the nodata value could not be read from the source file, the function used to define a value itself, which is prone to errors. This value now needs to be set by a user via new argument `nodata` if it cannot be read from the source file.\n  + bug fix: no longer try to download 'Copernicus 30m Global DEM' or 'Copernicus 90m Global DEM' tiles that don't exist.\n\n- function :func:`pyroSAR.auxdata.dem_autoload`:\n\n  + new argument `dst_nodata`. This can be used to temporarily override the native nodata value for extrapolation of ocean areas (in combination with ``hide_nodata=True``).\n\n0.18.0 | 2022-08-24\n===================\n\nDrivers\n-------\n- method :meth:`pyroSAR.drivers.SAFE.quicklook`: new argument `na_transparent`\n- new class :class:`~pyroSAR.drivers.TDM`\n- method :meth:`pyroSAR.drivers.TSX.getCorners`: fixed bug in longitude computation\n- class :class:`~pyroSAR.drivers.ESA`: improved support for ERS and ASAR\n\n\nGAMMA API\n---------\n- :ref:`Command API <gamma-command-api>` compatibility with GAMMA version 20220629\n\nSNAP API\n--------\n- compatibility with SNAP version 9\n- function :func:`~pyroSAR.snap.util.geocode`: improved support for ERS and ASAR\n\n0.19.0 | 2022-09-28\n===================\n\nDrivers\n-------\n- class :class:`pyroSAR.drivers.ESA`: added support for ASAR WSM\n\nSNAP API\n--------\n- new convenience functions:\n\n  + :func:`pyroSAR.snap.auxil.geo_parametrize`\n  + :func:`pyroSAR.snap.auxil.sub_parametrize`\n  + :func:`pyroSAR.snap.auxil.mli_parametrize`\n  + :func:`pyroSAR.snap.auxil.dem_parametrize`\n\n- function :func:`pyroSAR.snap.auxil.orb_parametrize`: removed args `workflow`, `before`, `continueOnFail`; added 
`kwargs`\n- function :func:`pyroSAR.snap.auxil.erode_edges`: extended to also take a BEAM-DIMAP product as input or a folder of multiple ENVI files (and not just an individual ENVI file)\n- function :func:`pyroSAR.snap.auxil.Workflow.insert_node`: option to insert multiple nodes at once\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.dem_autoload`:\n\n  + new argument `crop` to optionally return the full extent of all overlapping DEM tiles\n  + added download status print messages\n  + download and modify a Copernicus DEM index file for future reuse; this removes the need to search the FTP server for files and thus greatly accelerates the process of collecting all files overlapping with the AOI\n\n0.20.0 | 2022-12-27\n===================\n\nDrivers\n-------\n- class :class:`pyroSAR.drivers.ESA`: changed ASAR orbit type from DELFT to DORIS\n- class :class:`pyroSAR.drivers.BEAM_DIMAP`: new attributes `meta['incidence']` and `meta['image_geometry']`\n- class :class:`pyroSAR.drivers.Archive`: new argument `date_strict` for method :meth:`~pyroSAR.drivers.Archive.select`\n\nSNAP API\n--------\n- function :func:`pyroSAR.snap.util.geocode`: force multi-looking for ERS1, ERS2, ASAR even if range and azimuth factor are both 1\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.dem_autoload`:\n\n  + no longer require DEM tiles for creating a mosaic to address ocean cases\n  + simplified handling and removed arguments `nodata`, `dst_nodata` and `hide_nodata`\n  + the DEM option 'Copernicus 30m Global DEM' now also includes several auxiliary layers that can be downloaded automatically\n  + the URLs for DEM options 'SRTM 3Sec' and 'TDX90m' have been updated\n\n- function :func:`pyroSAR.auxdata.dem_create`:\n\n  + option to customize the output DEM via additional keyword arguments to be passed to :func:`spatialist.auxil.gdalwarp`\n  + no longer require a nodata value\n\n0.21.0 | 
2023-05-11\n===================\n\nDrivers\n-------\n- class :class:`pyroSAR.drivers.Archive`:\n\n  + improved PostgreSQL connection stability\n  + method :meth:`~pyroSAR.drivers.Archive.select`: the `vectorobject` geometry is now cloned before being reprojected to EPSG:4326 so that the source geometry remains unaltered\n\nGAMMA API\n---------\n- the `LAT` module is no longer needed: new pyroSAR-internal implementations can be used if the module is missing (concerns commands `product`, `ratio` and `linear_to_dB`)\n- improved backwards compatibility:\n\n  + use `multi_look_ScanSAR` if present and `multi_S1_TOPS` otherwise\n  + use `gc_map2` if possible (present and with all needed arguments) and `gc_map` otherwise\n  + addressed the case where `gc_map` does not have an argument `OFF_par`\n\n- function `gamma.pixel_area_wrap`: new argument `exist_ok` (this function will be made more visible in the documentation once matured)\n- bug fixes:\n\n  + :func:`pyroSAR.gamma.convert2gamma`: raise an error if `S1_bnr=True` but the GAMMA command does not support border noise removal\n  + :func:`pyroSAR.gamma.geocode`: removed unneeded underscore in HDR file naming\n  + `gamma.pixel_area_wrap`: fixed some issues with occasionally missing intermediate files, e.g. 
for computing ratios\n\nSNAP API\n--------\n- function :func:`pyroSAR.snap.util.geocode`: new argument `dem_oversampling_multiple` with default 2 to increase the DEM oversampling factor for terrain flattening\n- function :func:`pyroSAR.snap.auxil.erode_edges`:\n\n  + do not attempt to perform erosion if the image only contains nodata (this might happen if only parts of the image were geocoded)\n  + make sure that a backscatter image is used for erosion (auxiliary data like the local incidence angle often has a larger valid data extent and using such image for erosion would thus not properly erode edges of the backscatter images; additionally this has the effect that all images will have the same valid data extent after erosion)\n  + the written mask files (delineating valid data and nodata after erosion of the backscatter image and used for masking all other images) are now compressed (deflate) so that data volume is decreased significantly\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.dem_create`:\n\n  + new argument `resampleAlg` to change the resampling algorithm\n\n0.22.0 | 2023-09-21\n===================\n\nDrivers\n-------\n- class :class:`pyroSAR.drivers.Archive`:\n\n  + allow multiple products with same `outname_base`, e.g. 
Sentinel-1 GRD and SLC; this required the introduction of a second primary key in the database\n  + method :meth:`~pyroSAR.drivers.Archive.import_outdated`: option to import data from an old database with only one primary key; this requires the old\n    database to be opened in legacy mode (new argument `legacy=True`)\n\n- class :class:`pyroSAR.drivers.SAFE`: support for handling Sentinel-1 OCN products (metadata reading and database handling)\n\nAuxiliary Data Handling\n-----------------------\n- class :class:`pyroSAR.auxdata.DEMHandler`: enabled handling of southern hemisphere geometries.\n\n0.22.1 | 2023-10-11\n===================\n\nDrivers\n-------\n- class :class:`pyroSAR.drivers.BEAM_DIMAP`: enable calling inherited method :meth:`~pyroSAR.drivers.ID.geometry`\n\n0.22.2 | 2023-11-16\n===================\n\nSNAP API\n--------\n- function :func:`pyroSAR.snap.auxil.writer`: fixed bug in ignoring `erode_edges` argument\n- function :func:`pyroSAR.snap.auxil.erode_edges`: enable handling of polarimetric matrices\n\nDrivers\n-------\n- function :func:`pyroSAR.drivers.identify`: enable reading of :class:`~pyroSAR.drivers.TDM` products\n\nMisc\n----\n- class :class:`pyroSAR.examine.ExamineGamma`: enhanced flexibility in finding GAMMA installation\n\n0.23.0 | 2023-11-23\n===================\n\nDrivers\n-------\n- class :class:`pyroSAR.drivers.Archive`: fixed bug in loading spatialite on Darwin-based systems\n\nAuxiliary Data Handling\n-----------------------\n\nchanges to Sentinel-1 OSV data handling:\n\n- method :meth:`pyroSAR.S1.OSV.catch`:\n\n  + removed `url_option` 1 (https://scihub.copernicus.eu/gnss)\n  + made option 2 the new default option 1 (https://step.esa.int/auxdata/orbits/Sentinel-1)\n\n- added new arguments to the following functions:\n\n  + :func:`pyroSAR.gamma.correctOSV`: `url_option`\n  + :func:`pyroSAR.gamma.geocode`: `s1_osv_url_option`\n  + :func:`pyroSAR.snap.auxil.orb_parametrize`: `url_option`\n  + :func:`pyroSAR.snap.util.geocode`: 
`s1_osv_url_option`\n  + :func:`pyroSAR.snap.util.noise_power`: `osv_url_option`\n\n0.24.0 | 2024-01-10\n===================\n\nDrivers\n-------\n- new base attribute `coordinates`\n- enable method :meth:`~pyroSAR.drivers.ID.geometry` for all driver classes\n- classes :class:`~pyroSAR.drivers.ESA` and :class:`~pyroSAR.drivers.CEOS_ERS`: removed call to `gdalinfo`\n  (for increased test capability and speed)\n- outsourced regular expressions for product identification into separate module `patterns`\n\nAuxiliary Data Handling\n-----------------------\n- method :meth:`pyroSAR.S1.OSV.catch`: fixed bug in finding files starting in previous month\n\n0.25.0 | 2024-04-16\n===================\n\nDrivers\n-------\n- class :class:`pyroSAR.drivers.Archive`:\n\n  + replaced column `bbox` with `geometry`; requires database migration\n  + method :meth:`~pyroSAR.drivers.Archive.export2shp`: improved column name laundering\n\nSNAP API\n--------\n- function :func:`pyroSAR.snap.auxil.gpt`: fixed bug that occurred during removal of BNR node\n\nAncillary Tools\n---------------\n- new classes :class:`pyroSAR.ancillary.Lock` and :class:`pyroSAR.ancillary.LockCollection`\n  for custom file/folder locking\n\nAuxiliary Data Handling\n-----------------------\n- function :func:`pyroSAR.auxdata.dem_create`:\n\n  + make use of new classes :class:`~pyroSAR.ancillary.Lock` and :class:`~pyroSAR.ancillary.LockCollection`\n    for DEM download and mosaic creation (new argument `lock_timeout`)\n  + check whether all VRT source files exist\n\n0.26.0 | 2024-05-15\n===================\n\nSNAP API\n--------\n- compatibility with SNAP 10.\n- completely revised configuration mechanisms. 
See\n\n  + :doc:`/general/configuration`\n  + :class:`pyroSAR.examine.ExamineSnap`\n  + :class:`pyroSAR.examine.SnapProperties`\n\n0.26.1 | 2024-10-01\n===================\n\nDrivers\n-------\n- method :meth:`pyroSAR.drivers.Archive.select`: do not accept multi-feature vector objects\n\nSNAP API\n--------\n- fixed bug in writing SNAP properties configuration\n\nAuxiliary Data Handling\n-----------------------\n- class :class:`pyroSAR.auxdata.DEMHandler`: lock created VRT files\n\n0.27.0 | 2024-12-19\n===================\n\nAuxiliary Data Handling\n-----------------------\n- class :class:`pyroSAR.S1.OSV`: fixed bug in searching STEP OSV repository\n- function :func:`pyroSAR.auxdata.dem_create`: removed argument `lock_timeout`; the target file is no longer locked.\n- function :func:`pyroSAR.auxdata.dem_autoload`: the target VRT file is no longer locked. However, the individual downloaded DEM tiles now are.\n\nAncillary Tools\n---------------\n- classes :class:`~pyroSAR.ancillary.Lock` and :class:`~pyroSAR.ancillary.LockCollection`:\n  enable nested locking\n\nMisc\n----\n- removed upper Python dependency limit\n\n0.28.0 | 2025-02-20\n===================\n\nGeneral\n-------\n- support for SNAP 11 (tested, no modifications necessary)\n- support for Sentinel-1C and D\n\nDrivers\n-------\n- function :func:`pyroSAR.drivers.identify_many`: new argument `cores` for parallel scene identification\n- class :class:`pyroSAR.drivers.SAFE`: enable unzipping of products from CDSE\n\nAuxiliary Data Handling\n-----------------------\n- removed option for `TDX90m` DEM download because the FTP server has been shut down\n  (perhaps reactivated in the future if HTTPS authentication can be implemented)\n\n0.29.0 | 2025-04-09\n===================\n\nGeneral\n-------\n- extended support for Sentinel-1C and D\n\nDrivers\n-------\n- :meth:`pyroSAR.drivers.SAFE.geo_grid`: new method\n\n0.29.1 | 2025-05-12\n===================\n\nSNAP API\n--------\n- support for SNAP 12\n\n0.30.0 | 
2025-05-14\n===================\n\nDrivers\n-------\n- changed polygon coordinate order to counter-clockwise for methods\n\n  - :meth:`pyroSAR.drivers.ID.bbox`\n  - :meth:`pyroSAR.drivers.ID.geometry`\n- method :meth:`pyroSAR.drivers.Archive.select`: new argument `return_value`\n\n0.30.1 | 2025-08-22\n===================\n\nDrivers\n-------\n- :class:`~pyroSAR.drivers.ESA`: read all `GEOLOCATION GRID ADS` segments to obtain GCPs, not just the first one (bugfix)\n\nGAMMA API\n---------\n- support for GAMMA version 20250625\n- support for polar stereographic projections (via :meth:`~pyroSAR.gamma.auxil.ISPPar.envidict`)\n- class :class:`~pyroSAR.gamma.auxil.ISPPar`: raise error if file type is unknown\n  (instead of setting the `filetype` attribute to `unknown`)\n- :func:`~pyroSAR.gamma.util.pixel_area_wrap`:\n\n  + create ENVI HDR files for inputs to :func:`~pyroSAR.gamma.util.lat_ratio` (bugfix)\n  + fixed bug in ignoring conditions for writing ENVI HDR files of `pix*` and `gs_ratio` products\n\n- improved readability of tests\n\n0.31.0 | 2025-09-23\n===================\n\nDrivers\n-------\n- :meth:`pyroSAR.drivers.ID.bbox`: new argument `buffer`\n- :class:`~pyroSAR.drivers.SAFE`, :class:`~pyroSAR.drivers.BEAM_DIMAP`: new argument `looks`\n- :class:`~pyroSAR.drivers.Archive`: context-manage all database handles (code improvement)\n\nGAMMA API\n---------\n- :func:`~pyroSAR.gamma.util.convert2gamma`, :func:`~pyroSAR.gamma.util.correctOSV`: add file locking\n- fixed argument names of `isp.MLI_cat`\n\n0.32.0 | 2025-10-29\n===================\n\nSNAP API\n--------\n- :func:`~pyroSAR.snap.auxil.orb_parametrize`: improved ERS/ASAR orbit handling (more work necessary to always select the best available file, because all options are limited in time (e.g. 
use option 1 if possible, fall back to option 2 otherwise, etc.); needs a download functionality like :class:`pyroSAR.S1.auxil.OSV` to know which ones are available)\n- :func:`~pyroSAR.snap.util.geocode`:\n\n  + explicitly use 'Latest Auxiliary File' for Envisat calibration (just for readability, this is already the default value of the parsed node; other options: 'Product Auxiliary File', 'External Auxiliary File')\n  + leave calibration node polarizations field empty when processing all polarizations (otherwise processing may finish without errors but no product is being written; looks like a SNAP bug, also reported in `step-44830 <https://forum.step.esa.int/t/naming-of-source-bands/44830>`_)\n  + `Calibration` in/out band handling improvements\n\n    * select source bands based on sensor and acquisition mode (also described in `step-44830 <https://forum.step.esa.int/t/naming-of-source-bands/44830>`_)\n    * more explicit handling of output bands: all that are not needed set to `False`\n    * commented out output bands that are apparently not needed\n\n  + fixed sarsim-cc geocoding:\n\n    * old: `SAR-Simulation->Cross-Correlation->Terrain-Flattening->SARSim-Terrain-Correction` (does not work because `Terrain-Flattening` does not pass through any source layers)\n    * new: `SAR-Simulation->Cross-Correlation->Warp->Terrain-Flattening->Terrain-Correction`\n    * this reveals a flaw in current SNAP processing: the additional `Warp` step introduces unnecessary resampling, the created lookup table is not passed between operators and thus makes the process inefficient, the whole procedure only works with EPSG:4326 as map geometry thus, by the looks of it, requiring three forward geocoding steps (for `SAR-Simulation`, `Terrain-Flattening` and `Terrain-Correction`, respectively)\n\n- :func:`~pyroSAR.snap.auxil.groupbyWorkers`: add `Warp` operator to the group of its source node, because it cannot be executed alone (just like `ThermalNoiseRemoval`)\n- ancillary layer 
writing fix: a layover-shadow-mask can also be created by `SAR-Simulation`, but the output layer is named differently ('layover_shadow_mask' instead of 'layoverShadowMask' by `Terrain-Correction`); this must be handled correctly in :func:`pyroSAR.snap.auxil.writer`\n\nDrivers\n-------\n- :class:`~pyroSAR.drivers.ESA`:\n\n  + :meth:`~pyroSAR.drivers.ESA.scanMetadata`:\n\n    * read out all MPH, SPH, DSD and GEOLOCATION_GRID_ADS metadata and expose it via `meta['origin']`\n    * use absolute orbit number as `frameNumber` instead of product counter (which often seems to be 0)\n    * convert original metadata to Python types (int, float, datetime)\n    * renamed several meta attributes:\n\n      - `incidenceAngleMin` -> `incidence_nr`\n      - `incidenceAngleMax` -> `incidence_fr`\n      - `rangeResolution`, `azimuthResolution` -> `resolution` (tuple)\n      - `neszNear`, `neszFar` -> `nesz` (tuple)\n\n  + new method :meth:`~pyroSAR.drivers.ESA.geo_grid` (like for `SAFE`)\n  + corrected `acquisition_mode` for ASAR WSM, WSS\n  + added MR product type\n\n- :class:`~pyroSAR.drivers.BEAM_DIMAP`\n\n  + improved metadata parsing\n\n    * `incidenceAngleMidSwath` not always present, use `incidence_near` and `incidence_far` alternatively\n    * the cycle number may be named `orbit_cycle` or `CYCLE`\n    * for pyroSAR `frameNumber`, use `ABS_ORBIT`, not `data_take_id` as for Sentinel-1\n    * added further `meta` attributes: `swath`, `looks`\n    * always four `Polarizations` fields present, some may be set to None -> filtered out\n    * for Sentinel-1 the product and acquisition_mode attributes can be obtained from `ACQUISITION_MODE` and `PRODUCT_TYPE` respectively; for ASAR/ERS `ACQUISITION_MODE` is missing and `PRODUCT_TYPE` contains the original values, e.g. 
'ASA_APP_1P' -> must be abstracted\n\n  + added MR product type\n\n- :class:`~pyroSAR.drivers.ID`\n\n  + added methods `start_dt` and `stop_dt` returning timezone-aware datetime objects\n\nAncillary Tools\n---------------\n\n- :meth:`~pyroSAR.ancillary.multilook_factors`: fixed bug in returning 0 as range factor\n\n0.32.1 | 2025-11-06\n===================\n\nAuxiliary Data Handling\n-----------------------\n- class :class:`pyroSAR.S1.OSV`: lock local target files for download (to avoid multi-download and conflicts in parallel processes)\n\n0.33.0 | 2025-12-17\n===================\n\nDrivers\n-------\n- :class:`~pyroSAR.drivers.ESA`:\n\n    + convert coordinates in `meta['origin']` to floats\n    + read incident angles directly from metadata, not from custom mapping `ANGLES_RESOLUTION` (from which they have been removed)\n    + `ERS.mapping` renaming:\n\n          * `ANGLES_RESOLUTION` -> `RESOLUTION_NESZ`\n          * `get_angles_resolution` -> `get_resolution_nesz`\n          * `range` -> `res_rg`\n          * `azimuth` -> `res_az`\n          * `nesz_near` -> `nesz_nr`\n          * `nesz_far` -> `nesz_fr`\n\n    + made code more robust by reading SPH and DSD sizes from MPH\n    + added WSS mode to `RESOLUTION_NESZ` (although all values are just `None` because they could not be found yet)\n    + simplified code and added typing\n\n- :class:`~pyroSAR.drivers.BEAM_DIMAP`:\n\n    + more robust incident angle reading\n\nSNAP API\n--------\n- support for SNAP 13\n\nAncillary Tools\n---------------\n\n- :meth:`~pyroSAR.ancillary.multilook_factors`: complete reimplementation for more robustness\n\nAuxiliary Data Handling\n-----------------------\n- class :class:`pyroSAR.auxdata.DEMHandler`: handle ocean areas without DEM coverage using a dummy DEM spanning the target extent instead of the whole globe. 
The latter is no longer supported by GDAL.\n\n0.33.1 | 2026-01-19\n===================\n\nDrivers\n-------\n- :meth:`pyroSAR.drivers.SAFE.geo_grid`: fixed datetime handling bug by requiring spatialist>=0.16.2\n\n0.33.2 | 2026-01-21\n===================\n\nAuxiliary Data Handling\n-----------------------\n- :meth:`S1.OSV.__catch_step_auxdata` do not stop if no file was found on first URL\n\n0.33.3 | 2026-01-30\n===================\n\nGAMMA API\n---------\n- :class:`pyroSAR.gamma.auxil.ISPPar`: fixed `date` attribute handling\n\n0.34.0 | 2026-02-12\n===================\n\nDrivers\n-------\n- :class:`~pyroSAR.drivers.CEOS_PSR`: add new `meta` attributes `heading` and `heading_scene`\n\nAuxiliary Data Handling\n-----------------------\n- enable global search (the parameter `geometries` is now optional)\n- generation of local indices to reduce web traffic\n- option to work in offline mode\n\nAncillary Tools\n---------------\n- class :class:`~pyroSAR.ancillary.Lock`: fixed bug where lock file would remain on error if target does not exist\n\nSNAP API\n--------\n- :meth:`pyroSAR.examine.ExamineSnap.get_version`: more robust mechanism to read version information.\n  Only the version is returned as string now (instead of a dictionary with version and release date).\n\n- :meth:`pyroSAR.examine.SnapProperties`: support for `snap.conf` files\n\n0.34.1 | 2026-02-12\n===================\n\nSNAP API\n--------\n- :class:`pyroSAR.examine.ExamineSnap`: restore Python 3.10 compatibility (f-string parsing issue)\n\n0.34.2 | 2026-02-13\n===================\n\nAncillary Tools\n---------------\n- restored Python 3.10 compatibility (import `typing_extensions.Self` instead of `typing.Self` if necessary)\n\n0.34.3 | 2026-02-17\n===================\n\nSNAP API\n--------\n- :class:`pyroSAR.examine.ExamineSnap`: do not call SNAP to read version info in `__init__`\n\nAuxiliary Data Handling\n-----------------------\n- handle empty URL lists in `DEMHandler.__retrieve`\n\n0.34.4 | 
2026-03-03\n===================\n\nSNAP API\n--------\n- :func:`pyroSAR.snap.auxil.erode_edges`: explicitly open BEAM-DIMAP .img files with the ENVI driver.\n  This was necessary because GDAL 3.12 introduces a new `MiraMonRaster` driver, which is used by default for .img files.\n\nDrivers\n-------\n- use `MEM` instead of `Memory` as driver for creating in-memory :class:`spatialist.vector.Vector` objects. `Memory` has been deprecated.\n\n0.34.5 | 2026-03-06\n===================\n\nSNAP API\n--------\n- :meth:`pyroSAR.examine.ExamineSnap.get_version`: fixed bug where the X11 environment variable `DISPLAY` was preventing SNAP from starting\n\nGAMMA API\n---------\n- handle subprocess signal kills like segmentation fault (SIGSEGV). Before, these were just passed through; now a `RuntimeError` is raised.\n\n0.35.0 | 2026-03-09\n===================\n\nArchive\n-------\n- new module :mod:`pyroSAR.archive` extracted from :mod:`pyroSAR.drivers`\n- new protocol class :class:`pyroSAR.archive.SceneArchive` to establish an interface for scene search classes (inherited by :class:`pyroSAR.archive.Archive`).\n- method `Archive.encode` has been renamed to :meth:`~pyroSAR.archive.Archive.to_str` and has been reimplemented to be more predictable\n\nDrivers\n-------\n- :class:`~pyroSAR.drivers.ID`: deleted method `export2sqlite`\n\n0.36.0 | 2026-03-10\n===================\n\nGAMMA API\n---------\n\n- :func:`pyroSAR.gamma.dem.dem_import`:\n\n    + add `shellscript` argument\n    + consistently pass `logpath`, `outdir` and `shellscript` to GAMMA commands\n\n- :func:`pyroSAR.gamma.auxil.process`:\n\n    + replace environment variable `base` in the `shellscript` with `OUTDIR` and corrected its usage.\n      Before, the value of `outdir` in the command was just replaced with `$base`.\n      This led to wrong scripts whenever different values for `outdir` were passed to `process`.\n      Now, no global variable is set and `OUTDIR` is redefined whenever the value of `outdir` changes, e.g.\n\n      
.. code-block:: bash\n\n          OUTDIR=/xyz\n          command1 $OUTDIR\n          command2 $OUTDIR\n          OUTDIR=/abc\n          command3 $OUTDIR\n\n    + bugfix: the file header and the declaration of `GAMMA_HOME` are now written to the file even if `outdir=None`\n\n0.36.1 | 2026-03-24\n===================\n\nGAMMA API\n---------\n\n- :func:`pyroSAR.gamma.util.convert2gamma`: fix error in not removing thermal noise due to GAMMA interface change\n"
  },
  {
    "path": "docs/source/about/projects.rst",
    "content": "######################\nProjects using pyroSAR\n######################\n\npyroSAR is/was used in these projects:\n\n- `BACI <http://www.baci-h2020.eu/index.php/Main/HomePage>`_\n- `CCI Biomass <https://climate.esa.int/en/projects/biomass/>`_\n- `COPA <https://sentinel.esa.int/web/sentinel/sentinel-1-ard-normalised-radar-backscatter-nrb-product>`_\n- `EMSAfrica <https://www.emsafrica.org/>`_\n- `GlobBiomass <https://globbiomass.org/>`_\n- `SALDi <https://www.saldi.uni-jena.de/>`_\n- `SenThIS <https://eos-jena.com/en/projects/>`_\n- `Sentinel4REDD <https://www.dlr.de/rd/en/Portaldata/28/Resources/dokumente/re/Projektblatt_Sentinel4REDD_engl.pdf>`_\n- `SWOS <https://www.swos-service.eu/>`_\n- `BONDS <https://www.biodiversa.org/1418>`_\n\nYou know of other projects? We'd be happy to know.\n"
  },
  {
    "path": "docs/source/about/publications.rst",
    "content": "############\nPublications\n############\n\n.. bibliography::\n    :style: plain\n    :list: bullet\n    :filter: author % \"Truckenbrodt\""
  },
  {
    "path": "docs/source/about/references.rst",
    "content": ".. only:: html or text\n\n    References\n    ==========\n\n.. bibliography::\n    :style: plain\n"
  },
  {
    "path": "docs/source/api/ancillary.rst",
    "content": "Ancillary Functions\n===================\n\n.. automodule:: pyroSAR.ancillary\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        find_datasets\n        getargs\n        groupby\n        groupbyTime\n        hasarg\n        multilook_factors\n        parse_datasetname\n        seconds\n        Lock\n        LockCollection\n"
  },
  {
    "path": "docs/source/api/archive.rst",
    "content": "Archive\n=======\n\n.. automodule:: pyroSAR.archive\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        Archive\n        drop_archive\n"
  },
  {
    "path": "docs/source/api/auxdata.rst",
    "content": "Auxiliary Data Tools\n====================\n\n.. automodule:: pyroSAR.auxdata\n    :members: dem_autoload, dem_create, get_egm_lookup, getasse30_hdr, get_dem_options, DEMHandler\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        dem_autoload\n        dem_create\n        get_egm_lookup\n        getasse30_hdr\n        get_dem_options\n        DEMHandler\n"
  },
  {
    "path": "docs/source/api/datacube.rst",
    "content": "Datacube Tools\n==============\n\n.. automodule:: pyroSAR.datacube_util\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/source/api/drivers.rst",
    "content": "Drivers\n=======\n\n.. automodule:: pyroSAR.drivers\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. rubric:: classes\n\n    .. autosummary::\n        :nosignatures:\n\n        ID\n        BEAM_DIMAP\n        CEOS_PSR\n        CEOS_ERS\n        EORC_PSR\n        ESA\n        SAFE\n        TSX\n        TDM\n\n    .. rubric:: functions\n\n    .. autosummary::\n        :nosignatures:\n\n        identify\n        identify_many\n        filter_processed\n        getFileObj\n        parse_date\n"
  },
  {
    "path": "docs/source/api/examine.rst",
    "content": "Examine\n=======\n\n.. automodule:: pyroSAR.examine\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:"
  },
  {
    "path": "docs/source/api/figures/snap_geocode.graphml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\" xmlns:java=\"http://www.yworks.com/xml/yfiles-common/1.0/java\" xmlns:sys=\"http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0\" xmlns:x=\"http://www.yworks.com/xml/yfiles-common/markup/2.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:y=\"http://www.yworks.com/xml/graphml\" xmlns:yed=\"http://www.yworks.com/xml/yed/3\" xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd\">\n  <!--Created by yEd 3.21.1-->\n  <key attr.name=\"Description\" attr.type=\"string\" for=\"graph\" id=\"d0\"/>\n  <key for=\"port\" id=\"d1\" yfiles.type=\"portgraphics\"/>\n  <key for=\"port\" id=\"d2\" yfiles.type=\"portgeometry\"/>\n  <key for=\"port\" id=\"d3\" yfiles.type=\"portuserdata\"/>\n  <key attr.name=\"url\" attr.type=\"string\" for=\"node\" id=\"d4\"/>\n  <key attr.name=\"description\" attr.type=\"string\" for=\"node\" id=\"d5\"/>\n  <key for=\"node\" id=\"d6\" yfiles.type=\"nodegraphics\"/>\n  <key for=\"graphml\" id=\"d7\" yfiles.type=\"resources\"/>\n  <key attr.name=\"url\" attr.type=\"string\" for=\"edge\" id=\"d8\"/>\n  <key attr.name=\"description\" attr.type=\"string\" for=\"edge\" id=\"d9\"/>\n  <key for=\"edge\" id=\"d10\" yfiles.type=\"edgegraphics\"/>\n  <graph edgedefault=\"directed\" id=\"G\">\n    <data key=\"d0\" xml:space=\"preserve\"/>\n    <node id=\"n0\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" x=\"451.5279999999999\" y=\"-0.8119999999999976\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" 
height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"48.677734375\" x=\"18.1611328125\" xml:space=\"preserve\" y=\"5.649414062499986\">S1 GRD<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n1\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"131.0\" x=\"428.5279999999999\" y=\"240.628\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"126.0390625\" x=\"2.48046875\" xml:space=\"preserve\" y=\"5.6494140625\">ThermalNoiseRemoval<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n2\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"449.0279999999999\" y=\"376.628\"/>\n          <y:Fill 
color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"87.349609375\" x=\"1.3251953125\" xml:space=\"preserve\" y=\"5.6494140625\">Apply-Orbit-File<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n3\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"449.0279999999999\" y=\"494.128\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"51.349609375\" x=\"19.3251953125\" xml:space=\"preserve\" y=\"5.6494140625\">Multilook<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" 
upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n4\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"106.75999999999999\" x=\"440.6479999999999\" y=\"554.1279999999999\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"98.705078125\" x=\"4.0274609374999955\" xml:space=\"preserve\" y=\"5.6494140625\">Terrain-Flattening<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n5\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"106.75999999999999\" x=\"440.6479999999999\" y=\"704.1279999999999\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" 
visible=\"true\" width=\"101.359375\" x=\"2.7003124999999955\" xml:space=\"preserve\" y=\"5.6494140625\">Terrain-Correction<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n6\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"120.0\" x=\"587.0279999999997\" y=\"376.628\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"116.025390625\" x=\"1.9873046875\" xml:space=\"preserve\" y=\"5.6494140625\">RESORB / POEORB<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n7\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" x=\"587.0279999999997\" y=\"554.1279999999999\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          
<y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"30.666015625\" x=\"27.1669921875\" xml:space=\"preserve\" y=\"5.6494140625\">DEM<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n8\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.00000000000006\" x=\"449.0279999999999\" y=\"59.18799999999999\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"32.6875\" x=\"28.65625\" xml:space=\"preserve\" y=\"5.6494140625\">Read<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n9\">\n      
<data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"105.43200000000002\" x=\"261.084\" y=\"240.628\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"99.35546875\" x=\"3.038265625000008\" xml:space=\"preserve\" y=\"5.6494140625\">TOPSAR-Deburst<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n10\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" x=\"271.3\" y=\"-0.8119999999999976\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"45.35546875\" x=\"19.822265625\" xml:space=\"preserve\" y=\"5.6494140625\">S1 SLC<y:LabelModel><y:SmartNodeLabelModel 
distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n11\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"131.0\" x=\"248.3\" y=\"180.628\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"126.0390625\" x=\"2.48046875\" xml:space=\"preserve\" y=\"5.6494140625\">ThermalNoiseRemoval<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n12\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.00000000000006\" x=\"268.8\" y=\"59.188\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" 
height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"32.6875\" x=\"28.656250000000057\" xml:space=\"preserve\" y=\"5.6494140625\">Read<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n13\" yfiles.foldertype=\"group\">\n      <data key=\"d4\" xml:space=\"preserve\"/>\n      <data key=\"d6\">\n        <y:ProxyAutoBoundsNode>\n          <y:Realizers active=\"0\">\n            <y:GroupNode>\n              <y:Geometry height=\"391.81646484375\" width=\"190.50000000000136\" x=\"599.5059999999991\" y=\"-38.18846484375\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"left\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"190.50000000000136\" x=\"0.0\" xml:space=\"preserve\" y=\"0.0\">multi-GRD</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"false\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"15\" bottomF=\"15.0\" left=\"15\" leftF=\"15.0\" right=\"15\" rightF=\"15.0\" top=\"15\" topF=\"15.0\"/>\n              <y:BorderInsets bottom=\"0\" 
bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n            <y:GroupNode>\n              <y:Geometry height=\"50.0\" width=\"50.0\" x=\"614.2559999999997\" y=\"83.25153515624999\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"right\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"59.02685546875\" x=\"-4.513427734375\" xml:space=\"preserve\" y=\"0.0\">Folder 1</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"true\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"5\" bottomF=\"5.0\" left=\"5\" leftF=\"5.0\" right=\"5\" rightF=\"5.0\" top=\"5\" topF=\"5.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n          </y:Realizers>\n        </y:ProxyAutoBoundsNode>\n      </data>\n      <graph edgedefault=\"directed\" id=\"n13:\">\n        <node id=\"n13::n0\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"85.0\" x=\"652.2559999999997\" y=\"-0.8119999999999976\"/>\n              <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" 
hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"58.685546875\" x=\"13.1572265625\" xml:space=\"preserve\" y=\"5.6494140625\">S1 GRD 2<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n13::n1\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"131.0\" x=\"629.2559999999997\" y=\"240.628\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"126.0390625\" x=\"2.48046875\" xml:space=\"preserve\" y=\"5.6494140625\">ThermalNoiseRemoval<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n13::n2\">\n          <data key=\"d6\">\n            
<y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"90.00000000000006\" x=\"649.7559999999997\" y=\"59.18799999999999\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"32.6875\" x=\"28.65625\" xml:space=\"preserve\" y=\"5.6494140625\">Read<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n13::n3\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"85.0\" x=\"652.2559999999997\" y=\"308.628\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"82.0234375\" x=\"1.48828125\" xml:space=\"preserve\" y=\"5.6494140625\">SliceAssembly<y:LabelModel><y:SmartNodeLabelModel 
distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n13::n4\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"160.50000000000136\" x=\"614.5059999999991\" y=\"120.62799999999999\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"154.03515625\" x=\"3.232421875000682\" xml:space=\"preserve\" y=\"5.6494140625\">Remove-GRD-Border-Noise<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n13::n5\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"90.00000000000011\" x=\"649.7559999999999\" y=\"180.628\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel 
alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"61.36328125\" x=\"14.318359375000114\" xml:space=\"preserve\" y=\"5.6494140625\">Calibration<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n      </graph>\n    </node>\n    <node id=\"n14\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"449.0279999999999\" y=\"434.128\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"41.359375\" x=\"24.3203125\" xml:space=\"preserve\" y=\"5.6494140625\">Subset<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    
<node id=\"n15\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" x=\"587.0279999999997\" y=\"434.128\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"24.671875\" x=\"30.1640625\" xml:space=\"preserve\" y=\"5.6494140625\">AOI<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n16\" yfiles.foldertype=\"group\">\n      <data key=\"d4\" xml:space=\"preserve\"/>\n      <data key=\"d6\">\n        <y:ProxyAutoBoundsNode>\n          <y:Realizers active=\"0\">\n            <y:GroupNode>\n              <y:Geometry height=\"82.37646484375\" width=\"120.0\" x=\"167.0559999999998\" y=\"516.7515351562499\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"left\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" 
verticalTextPosition=\"bottom\" visible=\"true\" width=\"120.0\" x=\"0.0\" xml:space=\"preserve\" y=\"0.0\">scatteringArea</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"false\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"15\" bottomF=\"15.0\" left=\"15\" leftF=\"15.0\" right=\"15\" rightF=\"15.0\" top=\"15\" topF=\"15.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n            <y:GroupNode>\n              <y:Geometry height=\"50.0\" width=\"50.0\" x=\"0.0\" y=\"60.0\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"right\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"59.02685546875\" x=\"-4.513427734375\" xml:space=\"preserve\" y=\"0.0\">Folder 2</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"true\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"5\" bottomF=\"5.0\" left=\"5\" leftF=\"5.0\" right=\"5\" rightF=\"5.0\" top=\"5\" topF=\"5.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n          </y:Realizers>\n        </y:ProxyAutoBoundsNode>\n      </data>\n      <graph edgedefault=\"directed\" id=\"n16:\">\n        <node id=\"n16::n0\">\n     
     <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"90.0\" x=\"182.0559999999998\" y=\"554.1279999999999\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"64.703125\" x=\"12.6484375\" xml:space=\"preserve\" y=\"5.6494140625\">BandMaths<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n      </graph>\n    </node>\n    <node id=\"n17\" yfiles.foldertype=\"group\">\n      <data key=\"d4\" xml:space=\"preserve\"/>\n      <data key=\"d6\">\n        <y:ProxyAutoBoundsNode>\n          <y:Realizers active=\"0\">\n            <y:GroupNode>\n              <y:Geometry height=\"82.37646484375\" width=\"134.296\" x=\"296.7039999999999\" y=\"591.7515351562499\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"left\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" 
modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"134.296\" x=\"0.0\" xml:space=\"preserve\" y=\"0.0\">gammaSigmaRatio</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"false\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"15\" bottomF=\"15.0\" left=\"15\" leftF=\"15.0\" right=\"15\" rightF=\"15.0\" top=\"15\" topF=\"15.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"5\" leftF=\"5.092000000000041\" right=\"9\" rightF=\"9.20399999999995\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n            <y:GroupNode>\n              <y:Geometry height=\"50.0\" width=\"50.0\" x=\"0.0\" y=\"60.0\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"right\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"59.02685546875\" x=\"-4.513427734375\" xml:space=\"preserve\" y=\"0.0\">Folder 3</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"true\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"5\" bottomF=\"5.0\" left=\"5\" leftF=\"5.0\" right=\"5\" rightF=\"5.0\" top=\"5\" topF=\"5.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n          </y:Realizers>\n        </y:ProxyAutoBoundsNode>\n      </data>\n      <graph 
edgedefault=\"directed\" id=\"n17:\">\n        <node id=\"n17::n0\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"90.0\" x=\"316.79599999999994\" y=\"629.1279999999999\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"64.703125\" x=\"12.6484375\" xml:space=\"preserve\" y=\"5.6494140625\">BandMaths<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n      </graph>\n    </node>\n    <node id=\"n18\" yfiles.foldertype=\"group\">\n      <data key=\"d4\" xml:space=\"preserve\"/>\n      <data key=\"d6\">\n        <y:ProxyAutoBoundsNode>\n          <y:Realizers active=\"0\">\n            <y:GroupNode>\n              <y:Geometry height=\"202.37646484375\" width=\"165.12400000000133\" x=\"316.79599999999994\" y=\"756.7515351562499\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"left\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" 
height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"165.12400000000133\" x=\"0.0\" xml:space=\"preserve\" y=\"0.0\">export_extra</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"false\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"15\" bottomF=\"15.0\" left=\"15\" leftF=\"15.0\" right=\"15\" rightF=\"15.0\" top=\"15\" topF=\"15.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"19\" leftF=\"19.36400000000134\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n            <y:GroupNode>\n              <y:Geometry height=\"50.0\" width=\"50.0\" x=\"0.0\" y=\"60.0\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"right\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"59.02685546875\" x=\"-4.513427734375\" xml:space=\"preserve\" y=\"0.0\">Folder 4</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"true\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"5\" bottomF=\"5.0\" left=\"5\" leftF=\"5.0\" right=\"5\" rightF=\"5.0\" top=\"5\" topF=\"5.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            
</y:GroupNode>\n          </y:Realizers>\n        </y:ProxyAutoBoundsNode>\n      </data>\n      <graph edgedefault=\"directed\" id=\"n18:\">\n        <node id=\"n18::n0\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"90.0\" x=\"364.04000000000127\" y=\"794.1279999999999\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"31.99609375\" x=\"29.001953125\" xml:space=\"preserve\" y=\"5.6494140625\">Write<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n18::n1\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"115.75999999999999\" x=\"351.1600000000013\" y=\"854.1279999999999\"/>\n              <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" 
textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"110.740234375\" x=\"2.5098828124999955\" xml:space=\"preserve\" y=\"5.6494140625\">localIncidenceAngle<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n18::n2\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"106.75999999999999\" x=\"355.6600000000013\" y=\"914.1279999999999\"/>\n              <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"14.001953125\" x=\"46.379023437499995\" xml:space=\"preserve\" y=\"5.6494140625\">...<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n      </graph>\n    </node>\n    <node id=\"n19\" yfiles.foldertype=\"group\">\n      <data key=\"d4\" xml:space=\"preserve\"/>\n      <data key=\"d6\">\n        <y:ProxyAutoBoundsNode>\n     
     <y:Realizers active=\"0\">\n            <y:GroupNode>\n              <y:Geometry height=\"202.37646484375\" width=\"164.29600000000005\" x=\"504.6520000000012\" y=\"756.7515351562499\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"left\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"164.29600000000005\" x=\"0.0\" xml:space=\"preserve\" y=\"0.0\">backscatter</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"false\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"15\" bottomF=\"15.0\" left=\"15\" leftF=\"15.0\" right=\"15\" rightF=\"15.0\" top=\"15\" topF=\"15.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n            <y:GroupNode>\n              <y:Geometry height=\"50.0\" width=\"50.0\" x=\"0.0\" y=\"60.0\"/>\n              <y:Fill color=\"#F5F5F5\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"right\" autoSizePolicy=\"node_width\" backgroundColor=\"#EBEBEB\" borderDistance=\"0.0\" fontFamily=\"Dialog\" fontSize=\"15\" fontStyle=\"plain\" hasLineColor=\"false\" height=\"22.37646484375\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"internal\" modelPosition=\"t\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"59.02685546875\" 
x=\"-4.513427734375\" xml:space=\"preserve\" y=\"0.0\">Folder 5</y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n              <y:State closed=\"true\" closedHeight=\"50.0\" closedWidth=\"50.0\" innerGraphDisplayEnabled=\"false\"/>\n              <y:Insets bottom=\"5\" bottomF=\"5.0\" left=\"5\" leftF=\"5.0\" right=\"5\" rightF=\"5.0\" top=\"5\" topF=\"5.0\"/>\n              <y:BorderInsets bottom=\"0\" bottomF=\"0.0\" left=\"0\" leftF=\"0.0\" right=\"0\" rightF=\"0.0\" top=\"0\" topF=\"0.0\"/>\n            </y:GroupNode>\n          </y:Realizers>\n        </y:ProxyAutoBoundsNode>\n      </data>\n      <graph edgedefault=\"directed\" id=\"n19:\">\n        <node id=\"n19::n0\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"98.38\" x=\"537.6100000000013\" y=\"794.1279999999999\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"94.03515625\" x=\"2.1724218750000546\" xml:space=\"preserve\" y=\"5.6494140625\">LinearToFromdB<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n19::n1\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry 
height=\"30.0\" width=\"90.0\" x=\"541.8000000000013\" y=\"854.1279999999999\"/>\n              <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"49.99609375\" x=\"20.001953125\" xml:space=\"preserve\" y=\"5.6494140625\">Write (2)<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"rectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n        <node id=\"n19::n2\">\n          <data key=\"d6\">\n            <y:ShapeNode>\n              <y:Geometry height=\"30.0\" width=\"134.29600000000005\" x=\"519.6520000000012\" y=\"914.1279999999999\"/>\n              <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n              <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n              <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"128.716796875\" x=\"2.7896015625000246\" xml:space=\"preserve\" y=\"5.6494140625\">sigma0/gamma0 elp/rtc<y:LabelModel><y:SmartNodeLabelModel 
distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n              <y:Shape type=\"roundrectangle\"/>\n            </y:ShapeNode>\n          </data>\n        </node>\n      </graph>\n    </node>\n    <node id=\"n20\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"449.0279999999999\" y=\"180.628\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"61.36328125\" x=\"14.318359375\" xml:space=\"preserve\" y=\"5.6494140625\">Calibration<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n21\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"160.50000000000136\" x=\"413.77799999999917\" y=\"120.62799999999999\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"dashed\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" 
fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"154.03515625\" x=\"3.232421875000682\" xml:space=\"preserve\" y=\"5.6494140625\">Remove-GRD-Border-Noise<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n22\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"268.80000000000007\" y=\"120.62799999999999\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"18.701171875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"61.36328125\" x=\"14.318359375\" xml:space=\"preserve\" y=\"5.6494140625\">Calibration<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <edge id=\"n19::e0\" source=\"n19::n0\" target=\"n19::n1\">\n      <data key=\"d10\">\n        
<y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e0\" source=\"n5\" target=\"n18::n0\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"90.0\" tx=\"27.259999999998627\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e1\" source=\"n3\" target=\"n4\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e2\" source=\"n4\" target=\"n5\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e3\" source=\"n6\" target=\"n2\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e4\" source=\"n7\" target=\"n4\">\n      <data 
key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e5\" source=\"n7\" target=\"n5\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"629.5279999999997\" y=\"719.1279999999999\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n19::e1\" source=\"n19::n1\" target=\"n19::n2\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n18::e0\" source=\"n18::n0\" target=\"n18::n1\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"328.2519999999999\" y=\"809.1279999999999\"/>\n            <y:Point x=\"328.2519999999999\" y=\"869.1279999999999\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n18::e1\" source=\"n18::n0\" target=\"n18::n2\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n 
           <y:Point x=\"328.2519999999999\" y=\"809.1279999999999\"/>\n            <y:Point x=\"328.2519999999999\" y=\"929.1279999999999\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e6\" source=\"n0\" target=\"n8\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e7\" source=\"n5\" target=\"n19::n0\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"494.0279999999999\" y=\"809.1279999999999\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e8\" source=\"n1\" target=\"n13::n3\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e9\" source=\"n10\" target=\"n12\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n     
     <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e10\" source=\"n11\" target=\"n9\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e11\" source=\"n9\" target=\"n2\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"313.8\" y=\"391.628\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n13::e0\" source=\"n13::n0\" target=\"n13::n2\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n13::e1\" source=\"n13::n1\" target=\"n13::n3\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e12\" source=\"n2\" target=\"n14\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle 
color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e13\" source=\"n15\" target=\"n14\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e14\" source=\"n4\" target=\"n17::n0\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"362.0559999999998\" y=\"569.1279999999999\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e15\" source=\"n17::n0\" target=\"n5\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"362.0559999999998\" y=\"719.1279999999999\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e16\" source=\"n3\" target=\"n16::n0\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"-23.52799999999945\" sy=\"-0.12799999999992906\" tx=\"0.0\" ty=\"-0.12799999999992906\">\n            <y:Point x=\"227.0559999999998\" y=\"509.00000000000006\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n      
    <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e17\" source=\"n16::n0\" target=\"n5\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"-132.52799999999945\" ty=\"0.0\">\n            <y:Point x=\"227.11199999999963\" y=\"719.1279999999999\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"none\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e18\" source=\"n4\" target=\"n16::n0\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"-131.84799999999944\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e19\" source=\"n8\" target=\"n21\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e20\" source=\"n21\" target=\"n20\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e21\" source=\"n20\" target=\"n1\">\n      
<data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e22\" source=\"n1\" target=\"n2\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e23\" source=\"n13::n3\" target=\"n2\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"494.0279999999999\" y=\"323.628\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e24\" source=\"n14\" target=\"n3\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e25\" source=\"n12\" target=\"n22\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" 
width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e26\" source=\"n22\" target=\"n11\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n13::e2\" source=\"n13::n2\" target=\"n13::n4\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n13::e3\" source=\"n13::n4\" target=\"n13::n5\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"n13::e4\" source=\"n13::n5\" target=\"n13::n1\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"dashed\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n  </graph>\n  <data key=\"d7\">\n    <y:Resources/>\n  
</data>\n</graphml>\n"
  },
  {
    "path": "docs/source/api/figures/workflow_readme.txt",
    "content": "workflow files were created with the yED Graph Editor (https://www.yworks.com/products/yed)\n\nsetting the vector bridge style:\nPreferences -> Display -> Bridge Style\n"
  },
  {
    "path": "docs/source/api/gamma/api.rst",
    "content": ".. _gamma-command-api:\n\nGAMMA Command API\n-----------------\n\nThis is an attempt to make it easier to execute GAMMA commands by offering automatically parsed Python functions.\nThus, instead of executing the command via shell:\n\n.. code-block:: shell\n\n    offset_fit offs ccp off.par coffs - 0.15 3 0 > offset_fit.log\n\none can wrap it in a Python script:\n\n.. code-block:: python\n\n    import os\n    from pyroSAR.gamma.api import isp\n\n    workdir = '/data/gamma_workdir'\n\n    parameters = {'offs': os.path.join(workdir, 'offs'),\n                  'ccp': os.path.join(workdir, 'ccp'),\n                  'OFF_par': os.path.join(workdir, 'off.par'),\n                  'coffs': os.path.join(workdir, 'coffs'),\n                  'thres': 0.15,\n                  'npoly': 3,\n                  'interact_flag': 0,\n                  'logpath': workdir}\n\n    isp.offset_fit(**parameters)\n\nA file `offset_fit.log` containing the output of the command is written in both cases. Any parameters, which should\nnot be written and need to be set to - in the shell can be omitted in the Python call since all optional parameters\nof the functions are already defined with '-' as a default.\nThe documentation can be called like with any Python function:\n\n.. code-block:: python\n\n    from pyroSAR.gamma.api import isp\n    help(isp.offset_fit)\n\nParser Documentation\n********************\n\n.. automodule:: pyroSAR.gamma.parser\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\nAPI Demo\n********\n\nThis is a demonstration of an output script as generated automatically by function\n:func:`~pyroSAR.gamma.parser.parse_module` for the GAMMA module `ISP`.\nWithin each function, the command name and all parameters are passed to function\n:func:`~pyroSAR.gamma.process`, which converts all input to :py:obj:`str` and then calls the command via the\n:mod:`subprocess` module.\n\n.. 
automodule:: pyroSAR.gamma.parser_demo\n    :members:\n    :undoc-members:\n    :show-inheritance:"
  },
  {
    "path": "docs/source/api/gamma/auxil.rst",
    "content": "Auxiliary functionality\n-----------------------\n\n.. automodule:: pyroSAR.gamma.auxil\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        do_execute\n        ISPPar\n        Namespace\n        par2hdr\n        process\n        slc_corners\n        Spacing\n        UTM"
  },
  {
    "path": "docs/source/api/gamma/dem.rst",
    "content": "DEM tools\n---------\n\n.. automodule:: pyroSAR.gamma.dem\n    :members: dem_autocreate, dem_import, dempar, fill, hgt, hgt_collect, makeSRTM, mosaic, swap\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        dem_autocreate\n        dem_import\n        dempar\n        fill\n        hgt\n        hgt_collect\n        makeSRTM\n        mosaic\n        swap"
  },
  {
    "path": "docs/source/api/gamma/error.rst",
    "content": "Error handling\n--------------\n\n.. automodule:: pyroSAR.gamma.error\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        gammaErrorHandler\n        GammaUnknownError"
  },
  {
    "path": "docs/source/api/gamma/figures/gamma_geocode.graphml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\" xmlns:java=\"http://www.yworks.com/xml/yfiles-common/1.0/java\" xmlns:sys=\"http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0\" xmlns:x=\"http://www.yworks.com/xml/yfiles-common/markup/2.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:y=\"http://www.yworks.com/xml/graphml\" xmlns:yed=\"http://www.yworks.com/xml/yed/3\" xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd\">\n  <!--Created by yEd 3.19.1.1-->\n  <key attr.name=\"Description\" attr.type=\"string\" for=\"graph\" id=\"d0\"/>\n  <key for=\"port\" id=\"d1\" yfiles.type=\"portgraphics\"/>\n  <key for=\"port\" id=\"d2\" yfiles.type=\"portgeometry\"/>\n  <key for=\"port\" id=\"d3\" yfiles.type=\"portuserdata\"/>\n  <key attr.name=\"url\" attr.type=\"string\" for=\"node\" id=\"d4\"/>\n  <key attr.name=\"description\" attr.type=\"string\" for=\"node\" id=\"d5\"/>\n  <key for=\"node\" id=\"d6\" yfiles.type=\"nodegraphics\"/>\n  <key for=\"graphml\" id=\"d7\" yfiles.type=\"resources\"/>\n  <key attr.name=\"url\" attr.type=\"string\" for=\"edge\" id=\"d8\"/>\n  <key attr.name=\"description\" attr.type=\"string\" for=\"edge\" id=\"d9\"/>\n  <key for=\"edge\" id=\"d10\" yfiles.type=\"edgegraphics\"/>\n  <graph edgedefault=\"directed\" id=\"G\">\n    <data key=\"d0\" xml:space=\"preserve\"/>\n    <node id=\"n0\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"597.0000000000002\" y=\"179.50000000000009\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" 
height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"78.033203125\" x=\"5.9833984375\" xml:space=\"preserve\" y=\"6.015625\">par_S1_GRD<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n1\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"712.0000000000002\" y=\"236.50000000000009\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"87.701171875\" x=\"1.1494140625\" xml:space=\"preserve\" y=\"6.015625000000028\">S1_OPOD_vec<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n2\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"597.0000000000002\" y=\"299.5000000000001\"/>\n          <y:Fill 
color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"92.212890625\" x=\"-1.1064453125\" xml:space=\"preserve\" y=\"6.015625\">multi_look_MLI<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n3\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"398.00000000000017\" y=\"366.0000000000001\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"50.875\" x=\"19.5625\" xml:space=\"preserve\" y=\"6.015625\">gc_map<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n    
      <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n4\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"597.0000000000002\" y=\"513.7500000000001\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"65.79296875\" x=\"12.103515625\" xml:space=\"preserve\" y=\"6.015625\">pixel_area<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n5\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"739.5000000000002\" y=\"573.7500000000001\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"50.41796875\" x=\"19.791015625\" xml:space=\"preserve\" 
y=\"6.015625\">product<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n6\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"597.0000000000002\" y=\"646.7500000000001\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"89.798828125\" x=\"0.1005859375\" xml:space=\"preserve\" y=\"6.015625\">geocode_back<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n7\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"597.0000000000002\" y=\"766.7500000000001\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" 
fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"77.83984375\" x=\"6.080078125\" xml:space=\"preserve\" y=\"6.015625\">linear_to_dB<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n8\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" x=\"599.5000000000002\" y=\"119.50000000000009\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"49.943359375\" x=\"17.5283203125\" xml:space=\"preserve\" y=\"6.015625\">S1 GRD<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n9\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" 
x=\"400.50000000000017\" y=\"281.5000000000001\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"31.17578125\" x=\"26.912109375\" xml:space=\"preserve\" y=\"6.015625\">DEM<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n10\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"129.0319999999998\" x=\"577.9960000000003\" y=\"826.7500000000001\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"128.095703125\" x=\"0.4681484374999627\" xml:space=\"preserve\" y=\"6.015625\">gamma0-rtc_geo_dB<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" 
nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n11\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"398.00000000000017\" y=\"766.7500000000001\"/>\n          <y:Fill color=\"#EEFFCC\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"89.798828125\" x=\"0.1005859375\" xml:space=\"preserve\" y=\"6.015625\">geocode_back<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"rectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n12\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" x=\"400.50000000000017\" y=\"826.7500000000001\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" 
verticalTextPosition=\"bottom\" visible=\"true\" width=\"49.87890625\" x=\"17.560546875\" xml:space=\"preserve\" y=\"6.015625\">inc_geo<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n13\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"30.0\" x=\"627.0000000000002\" y=\"236.50000000000009\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"22.357421875\" x=\"3.8212890625\" xml:space=\"preserve\" y=\"6.015625000000028\">mli<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n14\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"36.0\" x=\"624.0000000000002\" y=\"366.0000000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" 
width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"29.9921875\" x=\"3.00390625\" xml:space=\"preserve\" y=\"6.015625\">mli2<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n15\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"30.0\" x=\"478.75000000000017\" y=\"426.0000000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"30.689453125\" x=\"-0.3447265625\" xml:space=\"preserve\" y=\"6.015625\">dem<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n16\">\n  
    <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"30.0\" x=\"377.25000000000017\" y=\"426.0000000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"21.537109375\" x=\"4.2314453125\" xml:space=\"preserve\" y=\"6.015625\">inc<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n17\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"30.0\" x=\"428.00000000000017\" y=\"426.0000000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"19.64453125\" x=\"5.177734375\" xml:space=\"preserve\" y=\"6.015625\">lut<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter 
labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n18\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"30.0\" x=\"529.5000000000002\" y=\"426.0000000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"13.5859375\" x=\"8.20703125\" xml:space=\"preserve\" y=\"6.015625\">ls<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n19\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"99.07999999999993\" x=\"592.4600000000003\" y=\"573.7500000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" 
modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"89.166015625\" x=\"4.956992187499964\" xml:space=\"preserve\" y=\"6.015625\">sig2gam_ratio<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n20\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"90.0\" x=\"739.5000000000002\" y=\"646.7500000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"77.904296875\" x=\"6.0478515625\" xml:space=\"preserve\" y=\"6.015625\">gamma0-rtc<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n21\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"122.8879999999998\" x=\"580.5560000000003\" y=\"706.7500000000001\"/>\n          <y:Fill color=\"#E7F2FA\" transparent=\"false\"/>\n          
<y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"106.24609375\" x=\"8.320953124999846\" xml:space=\"preserve\" y=\"6.015625\">gamma0-rtc_geo<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape type=\"parallelogram\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <node id=\"n22\">\n      <data key=\"d6\">\n        <y:ShapeNode>\n          <y:Geometry height=\"30.0\" width=\"85.0\" x=\"714.5000000000002\" y=\"119.50000000000009\"/>\n          <y:Fill color=\"#6AB0DE\" transparent=\"false\"/>\n          <y:BorderStyle color=\"#000000\" raised=\"false\" type=\"line\" width=\"1.0\"/>\n          <y:NodeLabel alignment=\"center\" autoSizePolicy=\"content\" fontFamily=\"Dialog\" fontSize=\"12\" fontStyle=\"plain\" hasBackgroundColor=\"false\" hasLineColor=\"false\" height=\"17.96875\" horizontalTextPosition=\"center\" iconTextGap=\"4\" modelName=\"custom\" textColor=\"#000000\" verticalTextPosition=\"bottom\" visible=\"true\" width=\"54.279296875\" x=\"15.3603515625\" xml:space=\"preserve\" y=\"6.015625\">POEORB<y:LabelModel><y:SmartNodeLabelModel distance=\"4.0\"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX=\"0.0\" labelRatioY=\"0.0\" nodeRatioX=\"0.0\" nodeRatioY=\"0.0\" offsetX=\"0.0\" offsetY=\"0.0\" upX=\"0.0\" upY=\"-1.0\"/></y:ModelParameter></y:NodeLabel>\n          <y:Shape 
type=\"roundrectangle\"/>\n        </y:ShapeNode>\n      </data>\n    </node>\n    <edge id=\"e0\" source=\"n0\" target=\"n13\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e1\" source=\"n13\" target=\"n2\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e2\" source=\"n1\" target=\"n13\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e3\" source=\"n2\" target=\"n14\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e4\" source=\"n3\" target=\"n15\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"443.00000000000017\" y=\"410.4330726400001\"/>\n            <y:Point x=\"493.75000000000017\" y=\"410.4330726400001\"/>\n          
</y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e5\" source=\"n3\" target=\"n16\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"443.00000000000017\" y=\"410.4330726400001\"/>\n            <y:Point x=\"392.25000000000017\" y=\"410.4330726400001\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e6\" source=\"n14\" target=\"n4\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e7\" source=\"n14\" target=\"n3\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e8\" source=\"n8\" target=\"n0\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    
</edge>\n    <edge id=\"e9\" source=\"n9\" target=\"n3\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e10\" source=\"n3\" target=\"n17\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"443.00000000000017\" y=\"410.4330726400001\"/>\n            <y:Point x=\"443.00000000000017\" y=\"410.4330726400001\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e11\" source=\"n15\" target=\"n4\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e12\" source=\"n17\" target=\"n4\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e13\" source=\"n16\" target=\"n4\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"392.25000000000017\" 
y=\"528.7500000000001\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e14\" source=\"n18\" target=\"n4\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e15\" source=\"n14\" target=\"n5\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"784.5000000000002\" y=\"381.0000000000001\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e16\" source=\"n19\" target=\"n5\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e17\" source=\"n4\" target=\"n19\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e18\" 
source=\"n5\" target=\"n20\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e19\" source=\"n20\" target=\"n6\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e20\" source=\"n17\" target=\"n6\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"443.00000000000017\" y=\"661.7500000000001\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e21\" source=\"n21\" target=\"n7\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e22\" source=\"n3\" target=\"n18\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"443.00000000000017\" y=\"410.4330726400001\"/>\n            <y:Point x=\"544.5000000000002\" y=\"410.4330726400001\"/>\n          
</y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e23\" source=\"n16\" target=\"n11\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"392.25000000000017\" y=\"781.7500000000001\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e24\" source=\"n17\" target=\"n11\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e25\" source=\"n11\" target=\"n12\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e26\" source=\"n22\" target=\"n1\">\n      <data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e27\" source=\"n7\" target=\"n10\">\n      
<data key=\"d10\">\n        <y:PolyLineEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\"/>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:BendStyle smoothed=\"false\"/>\n        </y:PolyLineEdge>\n      </data>\n    </edge>\n    <edge id=\"e28\" source=\"n6\" target=\"n21\">\n      <data key=\"d9\"/>\n      <data key=\"d10\">\n        <y:ArcEdge>\n          <y:Path sx=\"0.0\" sy=\"0.0\" tx=\"0.0\" ty=\"0.0\">\n            <y:Point x=\"642.0\" y=\"691.75\"/>\n          </y:Path>\n          <y:LineStyle color=\"#000000\" type=\"line\" width=\"1.0\"/>\n          <y:Arrows source=\"none\" target=\"standard\"/>\n          <y:Arc height=\"0.0\" ratio=\"0.0\" type=\"fixedRatio\"/>\n        </y:ArcEdge>\n      </data>\n    </edge>\n  </graph>\n  <data key=\"d7\">\n    <y:Resources/>\n  </data>\n</graphml>\n"
  },
  {
    "path": "docs/source/api/gamma/index.rst",
    "content": "GAMMA\n=====\n\n.. toctree::\n    :maxdepth: 1\n\n    util\n    auxil\n    dem\n    api\n    error"
  },
  {
    "path": "docs/source/api/gamma/util.rst",
    "content": "Processing\n----------\n\n.. automodule:: pyroSAR.gamma.util\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        calibrate\n        convert2gamma\n        correctOSV\n        gc_map_wrap\n        geocode\n        lat_linear_to_db\n        lat_product\n        lat_ratio\n        multilook\n        ovs\n        pixel_area_wrap\n        S1_deburst"
  },
  {
    "path": "docs/source/api/sentinel-1.rst",
    "content": "Sentinel-1 Tools\n================\n\n.. automodule:: pyroSAR.S1\n    :members: OSV, removeGRDBorderNoise\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        OSV\n        removeGRDBorderNoise\n"
  },
  {
    "path": "docs/source/api/snap.rst",
    "content": "SNAP\n====\n\nProcessing\n----------\n\n.. automodule:: pyroSAR.snap.util\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        geocode\n        noise_power\n\nWorkflow Parsing and Execution\n------------------------------\n\n.. automodule:: pyroSAR.snap.auxil\n    :members: gpt, execute, parse_node, parse_recipe, split, groupbyWorkers, Workflow, Node, Par, Par_BandMath, dem_parametrize, geo_parametrize, mli_parametrize, orb_parametrize, sub_parametrize\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        gpt\n        execute\n        parse_node\n        parse_recipe\n        split\n        groupbyWorkers\n        Workflow\n        Node\n        Par\n        Par_BandMath\n        dem_parametrize\n        geo_parametrize\n        mli_parametrize\n        orb_parametrize\n        sub_parametrize\n\nGeneral Utilities\n-----------------\n\n.. automodule:: pyroSAR.snap.auxil\n    :members: erode_edges, writer\n    :undoc-members:\n    :show-inheritance:\n\n    .. autosummary::\n        :nosignatures:\n\n        erode_edges\n        writer\n"
  },
  {
    "path": "docs/source/conf.py",
    "content": "import sys\nimport os\nimport datetime\nfrom importlib.metadata import version as get_version\n\nproject = 'pyroSAR'\nauthors = 'the pyroSAR Developers'\nyear = datetime.datetime.now().year\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\nsys.path.insert(0, os.path.abspath('../..'))\n\n# The full version, including alpha/beta/rc tags.\nversion_full = get_version(project)\n# The short X.Y version.\nversion = '.'.join(version_full.split('.')[:2])\n# release is automatically added to the latex document title and header\nrelease = version\n\nautodoc_mock_imports = ['osgeo', 'sqlalchemy', 'sqlalchemy_utils', 'geoalchemy2',\n                        'lxml', 'progressbar', 'spatialist']\n\n# If your documentation needs a minimal Sphinx version, state it here.\nneeds_sphinx = '1.6'\n\n# Add any Sphinx extension module names here, as strings. 
They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n    'sphinx.ext.autodoc',\n    'sphinx.ext.coverage',\n    'sphinx.ext.napoleon',\n    'sphinx.ext.autosummary',\n    'sphinx.ext.intersphinx',\n    'sphinx.ext.viewcode',\n    'sphinxcontrib.bibtex',\n    'sphinxcontrib.cairosvgconverter',\n    'sphinx_autodoc_typehints'\n]\n\nbibtex_bibfiles = ['references.bib']\n\n# autodoc_default_flags = ['members']\nautosummary_generate = []\n\nintersphinx_mapping = {\n    'osgeo': ('https://gdal.org', None),\n    'python': ('https://docs.python.org/3', None),\n    'requests': ('https://requests.readthedocs.io/en/latest', None),\n    'scipy': ('https://docs.scipy.org/doc/scipy', None),\n    'spatialist': ('https://spatialist.readthedocs.io/en/latest', None),\n    'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest', None),\n    'sqlalchemy-utils': ('https://sqlalchemy-utils.readthedocs.io/en/latest', None)\n}\n\nnapoleon_google_docstring = False\nnapoleon_numpy_docstring = True\nnapoleon_include_init_with_doc = False\nnapoleon_include_private_with_doc = False\nnapoleon_include_special_with_doc = True\nnapoleon_use_admonition_for_examples = False\nnapoleon_use_admonition_for_notes = False\nnapoleon_use_admonition_for_references = False\nnapoleon_use_ivar = False\nnapoleon_use_param = True\nnapoleon_use_rtype = True\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix of source filenames.\nsource_suffix = '.rst'\n\n# The encoding of source files.\n# source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\ncopyright = ' (c) 2014-{}, {}'.format(year, authors)\n\n# The language for content autogenerated by Sphinx. 
Refer to documentation\n# for a list of supported languages.\n# language = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n# today = ''\n# Else, today_fmt is used as the format for a strftime call.\n# today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = ['_build']\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n# default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n# add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n# add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n# show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n# modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n# keep_warnings = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages.  See the documentation for\n# a list of builtin themes.\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  For a list of options available for each theme, see the\n# documentation.\n# html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n# html_theme_path = []\n\n# The name for this set of Sphinx documents.  If None, it defaults to\n# \"<project> v<release> documentation\".\n# html_title = None\n\n# A shorter title for the navigation bar.  
Default is the same as html_title.\n# html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n# html_logo = None\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n# html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n# html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n# html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n# html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n# html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n# html_additional_pages = {}\n\n# If false, no module index is generated.\n# html_domain_indices = True\n\n# If false, no index is generated.\nhtml_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n# html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n# html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n# html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. 
Default is True.\n# html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it.  The value of this option must be the\n# base URL from which the finished HTML is served.\n# html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n# html_file_suffix = None\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = '{}doc'.format(project)\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n    # The paper size ('letterpaper' or 'a4paper').\n    'papersize': 'a4paper',\n    \n    # The font size ('10pt', '11pt' or '12pt').\n    'pointsize': '10pt',\n    \n    # Additional stuff for the LaTeX preamble.\n    'preamble': r'''\n    \\setcounter{tocdepth}{2}\n    \\setlength{\\headheight}{27pt}\n    ''',\n    \n    # disable floating\n    'figure_align': 'H',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n    ('index',\n     '{}.tex'.format(project),\n     r'{} Documentation'.format(project),\n     authors, 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n# latex_use_parts = False\n\n# If true, show page references after internal links.\n# latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n# latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n# latex_appendices = []\n\n# If false, no module index is generated.\n# latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. 
List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n    ('index',\n     project,\n     '{} Documentation'.format(project),\n     [authors],\n     1)\n]\n\n# If true, show URL addresses after external links.\n# man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n    ('index',\n     project,\n     '{} Documentation'.format(project),\n     authors,\n     project,\n     'One line description of project.',\n     'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n# texinfo_appendices = []\n\n# If false, no module index is generated.\n# texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n# texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n# texinfo_no_detailmenu = False\n"
  },
  {
    "path": "docs/source/general/DEM.rst",
    "content": "###############\nDEM Preparation\n###############\n\nSAR processing requires a high resolution Digital Elevation Model for ortho-rectification and normalization of\nterrain-specific imaging effects.\n\nIn SNAP, the DEM is usually auto-downloaded by the software itself and the user only specifies the DEM source to be\nused, e.g. SRTM. pyroSAR's convenience function :func:`pyroSAR.snap.util.geocode` can additionally pass SNAP's option to use an\nexternal DEM file via parameters `externalDEMFile`, `externalDEMNoDataValue` and `externalDEMApplyEGM`.\n\nGAMMA does not provide ways to automatically download DEMs for processing and the user thus also needs to provide an\nexternal DEM file in GAMMA's own format. However, several commands are available to prepare these DEMs including\nconversion from geoid heights to WGS84 ellipsoid heights.\n\npyroSAR offers several convenience functions to automatically prepare DEM mosaics from different\nsources to use them in either SNAP or GAMMA.\n\nDownload of DEM Tiles\n=====================\n\nThe function :func:`pyroSAR.auxdata.dem_autoload` offers convenient download of tiles from different sources\noverlapping with user-defined geometries. Optionally, a buffer in degrees can be defined.\nThis function internally makes use of the function :func:`spatialist.auxil.gdalbuildvrt`.\n\n.. code-block:: python\n\n    from pyroSAR.auxdata import dem_autoload\n    from spatialist import Vector\n\n    site = 'mysite.shp'\n    vrt = 'mosaic.vrt'\n\n    with Vector(site) as vec:\n        vrt = dem_autoload(geometries=[vec],\n                           demType='SRTM 1Sec HGT',\n                           vrt=vrt,\n                           buffer=0.1)\n\nThe tiles, which are delivered in compressed archives, are directly connected to a virtual mosaic using GDAL's VRT\nformat, making it easier to work with them by treating them as a single file.\nFor downloading tiles of some DEM types, e.g. 
`TDX90m`, an account needs to be created and the user credentials be passed to\nfunction :func:`~pyroSAR.auxdata.dem_autoload`. See the function's documentation for further details.\n\nThe files are stored in SNAP's location for auxiliary data, which per default is `$HOME/.snap/auxdata/dem`.\nThe function :func:`~pyroSAR.auxdata.dem_autoload` has proven beneficial in server environments where not each node has internet access and the tiles thus\nneed to be downloaded prior to processing on these nodes.\n\nDEM Mosaicing\n=============\n\nIn a next step we create a mosaic GeoTIFF cropped to the boundaries defined in the VRT using the function\n:func:`pyroSAR.auxdata.dem_create`.\nThe spatial reference system, WGS84 UTM 32N in this case, is defined by its EPSG code but also several other options\nare available. Since for SAR processing we are interested in ellipsoid heights, we call the function with the according\nparameter `geoid_convert` set to `True`.\nThis function makes use of :func:`spatialist.auxil.gdalwarp`.\nConversion of vertical reference systems, e.g. from geoid to ellipsoid, requires GDAL version >=2.2.\n\n.. code-block:: python\n\n    from pyroSAR.auxdata import dem_create\n\n    outname = 'mysite_srtm.tif'\n\n    dem_create(src=vrt, dst=outname,\n               t_srs=32632, tr=(20, 20),\n               resampling_method='bilinear',\n               geoid_convert=True, geoid='EGM96')\n\nGAMMA Import\n============\n\nFor convenience, pyroSAR's :mod:`~pyroSAR.gamma` submodule contains a function :func:`pyroSAR.gamma.dem.dem_autocreate`, which is a\ncombination of functions :func:`~pyroSAR.auxdata.dem_autoload` and :func:`~pyroSAR.auxdata.dem_create` and further\nexecutes GAMMA commands for format conversion.\nIt offers the same parameters as these two functions and a user can additionally decide whether geoid-ellipsoid\nconversion is done in GDAL or in GAMMA via parameter `geoid_mode`. 
The output is a file in GAMMA format, which can\ndirectly be used for processing by e.g. function :func:`pyroSAR.gamma.geocode`.\n"
  },
  {
    "path": "docs/source/general/OSV.rst",
    "content": "####################################\nHandling of Orbit State Vector Files\n####################################\nSAR products require additional orbit state vector (OSV) information to improve their spatial location accuracy.\nThis information is found in externally hosted files, which need to be downloaded separately and are then used by SAR\nprocessing software to update the product's metadata. Currently, pyroSAR only supports handling of Sentinel-1 OSV files.\n\nIn SNAP, the corresponding processing node is called `Apply-Orbit-File`, which automatically downloads the OSV file and\nupdates the scene's metadata. The files are stored in SNAP's location for auxiliary data,\nwhich per default is `$HOME/.snap/auxdata/Orbits`.\n\nIn GAMMA, on the other hand, the downloading has to be done manually after which the command `isp.S1_OPOD_vec` can be\nused for updating the metadata. pyroSAR offers several approaches for automatically downloading these\nfiles. The central tool for managing existing files and downloading new ones is the class :class:`pyroSAR.S1.OSV`, which\nis used for all approaches.\n\n.. note::\n\n    in the following a dedicated directory is defined into which the files will be downloaded. If this directory is\n    not defined (default is `None`), the files will be downloaded to SNAP's auxiliary data location (see above). This is\n    recommended as the files are kept in a central location that is accessible both by SNAP and by pyroSAR's GAMMA\n    functionality.\n\napproach 1: direct download by time span\n========================================\n\nIn case a large number of scenes is to be processed and/or no internet access is available during processing, the files\ncan be downloaded by time span to a central directory. This is the most basic approach using the central class\n:class:`~pyroSAR.S1.OSV` mentioned above, making use of its methods :meth:`~pyroSAR.S1.OSV.catch` and\n:meth:`~pyroSAR.S1.OSV.retrieve`.\n\n.. 
code-block:: python\n\n    from pyroSAR.S1 import OSV\n\n    osvdir = '/path/to/osvdir'\n\n    with OSV(osvdir) as osv:\n        files = osv.catch(sensor='S1A', osvtype='POE',\n                          start='20170101T000000', stop='20180101T000000',\n                          url_option=1)\n        osv.retrieve(files)\n\nTwo sub-directories `POEORB` and `RESORB` will be created in `osvdir` containing the downloaded files. `POEORB` will\ncontain the `Precise Orbit Ephemerides` files, which are the most accurate but are first available about two weeks after\nthe scene's acquisition. `RESORB` describes the `Restituted Orbit` files, which are less accurate but available\ndirectly after acquisition. See method :meth:`~pyroSAR.S1.OSV.catch` for download URL options.\n\napproach 2: manual download per scene\n=====================================\n\nThe method :meth:`pyroSAR.drivers.SAFE.getOSV` can be used to directly retrieve the files relevant for the scene.\nThis method internally uses the methods described above with a time span limited to that of the scene acquisition.\n\n.. code-block:: python\n\n    from pyroSAR import identify\n    scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'\n    id = identify(scene)\n    match = id.getOSV(osvdir='/path/to/osvdir', osvType='POE', returnMatch=True)\n    print(match)\n\napproach 3: direct download and scene metadata update (GAMMA only)\n==================================================================\n\nThe convenience function :func:`pyroSAR.gamma.correctOSV` internally makes use of approach 2 and additionally directly\nexecutes the GAMMA command `isp.S1_OPOD_vec` for updating the scene's metadata with the information of the OSV file.\nThe scene has to be unpacked first (see :meth:`pyroSAR.drivers.SAFE.unpack`).\n\n.. 
code-block:: python\n\n    from pyroSAR import identify\n    from pyroSAR.gamma import correctOSV\n    scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'\n    id = identify(scene)\n    id.unpack('tmpdir')\n    correctOSV(id=id, osvdir='/path/to/osvdir', osvType='POE')\n\napproach 4: automatic download and use during processing\n========================================================\n\nThe processing function :func:`pyroSAR.gamma.geocode` automatically downloads OSV files needed for processing and\nupdates the scene's metadata using function :func:`~pyroSAR.gamma.correctOSV`.\nIt is thus the most convenient way to handle these files and related processing steps.\nThe parameter `allow_RES_OSV` can be used to allow processing with `RES` files if no `POE` file is available yet.\n\n.. code-block:: python\n\n    from pyroSAR.gamma import geocode\n    scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'\n    geocode(scene=scene,\n            dem='/path/to/demfile',\n            tmpdir='tmpdir',\n            outdir='outdir',\n            targetres=20,\n            osvdir='/path/to/osvdir',\n            allow_RES_OSV=False)\n\nSimilarly, the function :func:`pyroSAR.snap.util.geocode` also automatically downloads OSV files and chooses the best\nmatching OSV type for processing.\n\n.. code-block:: python\n\n    from pyroSAR.snap import geocode\n    scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'\n    geocode(infile=scene,\n            outdir='outdir',\n            allow_RES_OSV=True)\n\nIn contrast to the GAMMA function, the OSV download directory cannot be set because of the fixed SNAP auxiliary data\nlocation. The type of the available OSV file is written to the workflow XML file for processing:\n\n.. 
code-block:: xml\n\n    <node id=\"Apply-Orbit-File\">\n        <operator>Apply-Orbit-File</operator>\n        <sources>\n            <sourceProduct refid=\"Read\"/>\n        </sources>\n        <parameters class=\"com.bc.ceres.binding.dom.XppDomElement\">\n            <orbitType>Sentinel Restituted (Auto Download)</orbitType>\n            <polyDegree>3</polyDegree>\n            <continueOnFail>false</continueOnFail>\n        </parameters>\n    </node>\n"
  },
  {
    "path": "docs/source/general/configuration.rst",
    "content": "#############\nConfiguration\n#############\n\npyroSAR stores configuration under `$HOME/.pyrosar`.\nIt contains a file `config.ini` which stores installation paths of SNAP and GAMMA.\nThe installations are first identified by running the respective `Examine*` class (e.g. :class:`~pyroSAR.examine.ExamineSnap`):\n\n.. code-block:: python\n\n    from pyroSAR.examine import ExamineSnap\n    config = ExamineSnap()\n\nSNAP configuration can also be modified with this class, either by the object properties `userpath` and `auxdatapath` or by the underlying :class:`~pyroSAR.examine.SnapProperties` object:\n\n.. code-block:: python\n\n    config.userpath = '/path/to/snap/data'\n    config.snap_properties['snap.userdir'] = '/path/to/snap/data'\n\nThe values are directly written to either `snap.auxdata.properties` or `snap.properties` under `$HOME/.snap/etc`.\nThe content of these files will override that in the files found under `etc` in the SNAP installation folder.\nSetting a parameter to `None` will comment out the value in the respective file.\n"
  },
  {
    "path": "docs/source/general/filenaming.rst",
    "content": "###########\nFile Naming\n###########\n\npyroSAR internally uses a fixed naming scheme to keep track of processed results. For each scene an identifier is created,\nwhich contains the sensor, acquisition mode, orbit (ascending or descending) and the time stamp of the acquisition start.\nFor example `S1A__IW___A_20150222T170750`, which is created by calling method :meth:`~pyroSAR.drivers.ID.outname_base`:\n\n.. code-block:: python\n\n    from pyroSAR import identify\n    id = identify('S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip')\n    print(id.outname_base())\n\nFor each attribute a fixed number of digits is reserved. In case the attribute is shorter than this number,\nthe rest of the digits is filled with underscores. I.e., the sensor field is four digits long, but 'S1A' only three.\nThus, `S1A_` is the sensor slot. In the same way, `IW__` is the acquisition mode slot, which is also four digits long.\n`A` denotes ascending orbit, the time stamp is in format YYYYmmddTHHMMSS.\n\nProcessing functions like :func:`~pyroSAR.gamma.geocode` add suffixes to this identifier to further keep track of\nindividual processing steps performed on the dataset.\nThis core concept is used by many pyroSAR functions internally to keep track of which scenes have been processed before.\n"
  },
  {
    "path": "docs/source/general/installation.rst",
    "content": "############\nInstallation\n############\n\nconda\n=====\n\nStarting with version 0.11, pyroSAR is distributed via `conda-forge <https://anaconda.org/conda-forge/pyrosar>`_\nand can easily be installed with\n\n::\n\n    conda install --channel conda-forge pyrosar\n\nThis is by far the easiest way to work with pyroSAR on any operating system.\n\npip\n===\n\nInstallation with pip is also supported and offers the advantage to install intermediate development stages directly\nfrom the GitHub repository. Mind however that several dependencies like GDAL cannot fully be installed this way.\nSee further below for detailed Linux dependency installation instructions.\n\nInstallation of pip (Linux):\n\n::\n\n    sudo apt-get install python-pip\n\nThe latest stable release of pyroSAR can then be installed:\n\n::\n\n    python -m pip install pyroSAR\n\nFor installation of the latest master branch on GitHub, we need the version control system git. On Windows, git can be\ndownloaded from `git-scm.com <https://git-scm.com/downloads>`_. 
On Linux you can install it via command line:\n\n::\n\n    sudo apt-get install git\n\nOnce everything is set up, pyroSAR is ready to be installed:\n\n::\n\n    python -m pip install git+https://github.com/johntruckenbrodt/pyroSAR.git\n\nDependencies\n============\nThe more specific instructions below are intended for Linux users who like to work outside of the Anaconda environment.\n\nGDAL\n----\npyroSAR requires GDAL version 2.1 with GEOS and PROJ4 as dependencies as well as the GDAL Python binding.\n\nUbuntu\n++++++\nStarting with release Yakkety (16.10), Ubuntu comes with GDAL >2.1.\nYou can install it like this:\n\n::\n\n    sudo apt-get install python-gdal python3-gdal gdal-bin\n\nFor older Ubuntu releases you can add the ubuntugis repository to apt prior to installation to install version >2.1:\n\n::\n\n    sudo add-apt-repository ppa:ubuntugis/ppa\n    sudo apt-get update\n\nThis way the required dependencies (GEOS and PROJ4 in particular) are also installed.\nYou can check the version by typing:\n\n::\n\n    gdalinfo --version\n\nDebian\n++++++\nStarting with Debian 9 (Stretch) GDAL is available in version >2.1 in the official repository.\n\nBuilding from source\n++++++++++++++++++++\nAlternatively, you can build GDAL and the dependencies from source. The script `pyroSAR/install/install_deps.sh`\ngives specific instructions on how to do it. It is not yet intended to run this script via shell, but rather to\nfollow the instructions step by step.\n\nSQLite + SpatiaLite\n-------------------\nWhile `sqlite3` and its Python binding are usually already installed, the `spatialite` extension needs to be\nadded. Two packages exist, `libspatialite` and `mod_spatialite`. 
Both can be used by pyroSAR.\nOn Ubuntu, `mod_spatialite` has been found to be easier to setup with `sqlite` and can be installed via `apt`:\n\n::\n\n    sudo apt-get install libsqlite3-mod-spatialite\n\nOn CentOS, `libspatialite` including shared objects for extension loading can be installed via `yum`:\n\n::\n\n    sudo yum install libspatialite-devel\n\nThe following can be run in Python to test the needed functionality:\n\n.. code-block:: python\n\n    import sqlite3\n\n    # setup an in-memory database\n    con=sqlite3.connect(':memory:')\n\n    # enable loading extensions and load spatialite\n    con.enable_load_extension(True)\n    try:\n        con.load_extension('mod_spatialite.so')\n    except sqlite3.OperationalError:\n        con.load_extension('libspatialite.so')\n\nIn case loading extensions is not permitted you might need to install the package `pysqlite2`\ntogether with a static build of `sqlite3`. See the script `pyroSAR/install/install_deps.sh` for instructions.\nThere you can also find instructions on how to install `spatialite` from source.\nTo test `pysqlite2` you can import it as follows and then run the test above:\n\n.. code-block:: python\n\n    from pysqlite2 import dbapi2 as sqlite3\n\nInstalling this package is likely to cause problems with the `sqlite3` library installed on the system.\nThus, it is safer to build a static `sqlite3` library for it (see installation script).\n\nGAMMA\n-----\nGAMMA's home directory as environment variable 'GAMMA_HOME' is expected to end either as GAMMA_SOFTWARE-<VERSIONNUMBER> or GAMMA_SOFTWARE/<VERSIONNUMBER>. \nIf this differs in your install and cannot be changed, a workaround is adjusting the expected pattern in :class:`~pyroSAR.examine.ExamineGamma`.\n"
  },
  {
    "path": "docs/source/general/logging.rst",
    "content": "#######\nLogging\n#######\n\npyroSAR makes use of the :mod:`logging` module to display status messages for running processes.\nSee `Logging HOWTO <https://docs.python.org/3/howto/logging.html>`_ for a basic tutorial.\nTo display log messages you may add one of the following examples to your script:\n\n.. code-block:: python\n\n  import logging\n\n  # basic info\n  logging.basicConfig(level=logging.INFO)\n\n  # basic info with some message filtering\n  logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)\n\n  # detailed debug info\n  logging.basicConfig(level=logging.DEBUG)\n"
  },
  {
    "path": "docs/source/general/processing.rst",
    "content": "#################################\nSAR Image Handling and Processing\n#################################\n\nImage Metadata\n==============\n\nLet's start working with our actual satellite data.\nAt first we load the scene into pyroSAR for analysis of the metadata:\n\n.. code-block:: python\n\n    from pyroSAR import identify\n    name = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'\n    scene = identify(name)\n    print(scene)\n\nThis will automatically identify the scene, scan it for metadata and print a summary of selected metadata entries.\nSeveral attribute names (e.g. `sensor` and `acquisition_mode`) are standardized for all SAR scenes.\nFurther entries, whose names are not standardized, can be found in a dictionary `scene.meta`.\nThe function :func:`~pyroSAR.drivers.identify` will loop through all SAR image classes (:mod:`pyroSAR.drivers`) and return an\nobject of the class that was successful in identifying the scene (:class:`~pyroSAR.drivers.SAFE` in this case).\n\n.. _database-handling:\n\nDatabase Handling\n=================\n\nNow that we have made ourselves familiar with the scene, we can import its metadata into an SQLite database using class\n:class:`~pyroSAR.archive.Archive`:\n\n.. code-block:: python\n\n    from pyroSAR import Archive\n    dbfile = 'scenes.db'\n    with Archive(dbfile) as archive:\n        archive.insert(scene)\n\n`dbfile` is a file either containing an already existing database or one to be created.\nIn this case an SQLite database with SpatiaLite extension is created.\nAlternatively, PostgreSQL + PostGIS can be used.\n\nLet's assume our database contains a number of scenes and we want to select some for processing.\nWe have a shapefile, which contains a geometry delimiting our test site for which we want to\nprocess some Sentinel-1 scenes.\nWe already processed some scenes in the past and the results are stored in a directory\n`outdir`. 
We only want to select scenes which have not been processed to this directory before.\nFurthermore, we are only interested in scenes acquired in Ground Range Detected (GRD) Interferometric Wide\nSwath mode (IW), which contain a VV band.\n\n.. code-block:: python\n\n    from spatialist import Vector\n    archive = Archive('scenes.db')\n    outdir = '/path/to/processed/results'\n    maxdate = '20171231T235959'\n    with Vector('site.shp') as site:\n        selection_proc = archive.select(vectorobject=site,\n                                        processdir=outdir,\n                                        maxdate=maxdate,\n                                        sensor=('S1A', 'S1B'),\n                                        product='GRD',\n                                        acquisition_mode='IW',\n                                        vv=1)\n    archive.close()\n\nHere we use the vector geometry driver of package :doc:`spatialist <spatialist:index>`, which is developed alongside of pyroSAR.\nThe :class:`spatialist.Vector <spatialist.vector.Vector>` object is then passed to method\n:meth:`Archive.select <pyroSAR.drivers.Archive.select>`.\n\n.. _processing:\n\nProcessing\n==========\n\nThe returned `selection_proc` is a list of file names for the scenes we selected from the database, which we can now\npass to a processing function:\n\n.. 
code-block:: python\n\n    from pyroSAR.snap import geocode\n\n    # the target pixel spacing in meters\n    spacing = 20\n\n    for scene in selection_proc:\n        geocode(infile=scene, outdir=outdir, tr=spacing, scaling='db', shapefile=site)\n\nThe function :func:`snap.geocode <pyroSAR.snap.util.geocode>` is a basic utility for SNAP.\nIt will perform all necessary steps to subset, resample, topographically normalize, geocode and scale the input\nimage and write GeoTIFF files to the selected output directory.\nAll necessary files like orbit state vectors and SRTM DEM tiles are downloaded automatically in the background by SNAP.\nSNAP is most conveniently used with workflow XMLs. The function geocode parses a workflow for the particular scene,\nparametrizes it (depending on the scene type and selected processing parameters) and writes it to the output directory.\nIt then calls the command `gpt`, which is SNAP's command line interface, on the workflow to execute the processing steps.\n\n"
  },
  {
    "path": "docs/source/general/snap.rst",
    "content": "########\nSNAP API\n########\n\npyroSAR offers a collection of tools to parse SNAP XML workflows and execute them with SNAP's Graph Processing Tool\n(`GPT <https://senbox.atlassian.net/wiki/spaces/SNAP/pages/70503475/Bulk+Processing+with+GPT>`_). All functionality is\npurely performed in Python and only the command line calls to GPT interact with SNAP. SNAP's Python API\n`snappy <https://senbox.atlassian.net/wiki/spaces/SNAP/pages/19300362/How+to+use+the+SNAP+API+from+Python>`_ is not used\ndue to installation limitations and processing performance.\n\nThe following serves as a minimal example to showcase the core API functionality. A more complex example is given with\nfunction :func:`pyroSAR.snap.util.geocode`.\n\n.. code-block:: python\n\n    from pyroSAR.snap.auxil import parse_recipe, parse_node\n\n    workflow = parse_recipe('blank')\n\n    read = parse_node('Read')\n    read.parameters['file'] = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'\n    read.parameters['formatName'] = 'SENTINEL-1'\n    workflow.insert_node(read)\n\n    tnr = parse_node('ThermalNoiseRemoval')\n    workflow.insert_node(tnr, before=read.id)\n\n    bnr = parse_node('Remove-GRD-Border-Noise')\n    bnr.parameters['selectedPolarisations'] = ['VV']\n    workflow.insert_node(bnr, before=tnr.id)\n\n    write = parse_node('Write')\n    write.parameters['file'] = 'outname'\n    write.parameters['formatName'] = 'BEAM-DIMAP'\n    workflow.insert_node(write, before=bnr.id)\n\n    workflow.write('outname_proc')\n\nHere, the function :func:`~pyroSAR.snap.auxil.parse_recipe` is first used to create an empty workflow object of type\n:class:`~pyroSAR.snap.auxil.Workflow`.\nUsing the function :func:`~pyroSAR.snap.auxil.parse_node`, individual processing nodes can be loaded as\n:class:`~pyroSAR.snap.auxil.Node` objects and parameterized using a :class:`~pyroSAR.snap.auxil.Par` object via\n``<node>.parameters``.\nThe method 
:meth:`~pyroSAR.snap.auxil.Workflow.insert_node` is then used to insert the nodes into the workflow including\nlinking of the nodes by modifying the source node entries. E.g. `Read` is set as source of the newly inserted\n`Remove-GRD-Border-Noise` node. As a last step, the workflow is written to an XML file with method\n:meth:`~pyroSAR.snap.auxil.Workflow.write`.\n\nThis XML file can then be passed to function :func:`~pyroSAR.snap.auxil.gpt` to process the workflow by internally\ncalling the GPT command line tool:\n\n.. code-block:: python\n\n    from pyroSAR.snap.auxil import gpt\n\n    gpt('outname_proc.xml', tmpdir='.')\n\nworkflow splitting\n==================\n\nSimple workflows like the one shown above take only a few seconds to process, but the more processing nodes are added,\nthe more time it obviously takes to execute them. However, it was observed that executing long workflows takes longer\nand consumes more memory than executing each node individually. pyroSAR offers functionality to split long workflows\ninto smaller groups and execute them in sequence with intermediate files being written in a temporary directory.\nFirst, the workflow nodes are grouped to contain a defined number of processing nodes, i.e. everything but `Read` and\n`Write`, using function :func:`~pyroSAR.snap.auxil.groupbyWorkers`:\n\n.. code-block:: python\n\n    from pyroSAR.snap.auxil import groupbyWorkers\n\n    groupbyWorkers('outname_proc.xml', n=1)\n\nThis will return\n\n.. code-block:: python\n\n    [['Read', 'ThermalNoiseRemoval'], ['Remove-GRD-Border-Noise', 'Write']]\n\nThese groups can directly be passed to function :func:`~pyroSAR.snap.auxil.gpt` via parameter ``groups``.\nInternally the workflow is then split based on the groups and written to new XML files in a temporary directory using\nfunction :func:`~pyroSAR.snap.auxil.split`. 
In this case, two workflows would be created:\n\n- `Read` -> `ThermalNoiseRemoval` -> `Write`\n- `Read` -> `Remove-GRD-Border-Noise` -> `Write`\n\nThese new files are then executed in sequence with intermediate `BEAM-DIMAP`\nfiles written in the same directory as the sub-workflow XML files. After processing this directory is deleted unless\nparameter ``cleanup`` of function :func:`~pyroSAR.snap.auxil.gpt` is set to ``False``.\n\nbackwards compatibility\n=======================\n\nWith new versions of SNAP, new parameters are introduced and others removed. If a new parameter is not listed in the\nnode's XML description its default is used by SNAP during processing. If, however, a parameter is contained in the\nworkflow that is no longer supported by SNAP, the processing will be terminated. This can easily happen if the workflow\nwas created by an older version of SNAP. pyroSAR reads the error messages and, if an unknown parameter is mentioned,\ndeletes this parameter from the workflow, saves it to a new file and executes it instead.\n\ntroubleshooting\n===============\n\nSNAP as well as pyroSAR's SNAP API are constantly being developed and bugs are unfortunately inevitable.\nThis section is intended to guide users to better interpret errors and unexpected behaviour.\n\n*The process is running but seems inactive without any progress.*\n\nThis might be related to SNAP's inability to download needed DEM tiles.\nSNAP will be stuck in a loop infinitely trying to download the missing tiles.\nThis can be identified by directly running gpt in the command line.\nHowever, by operating gpt through a Python subprocess, it is not possible to see those command line messages.\nOnly after a process has terminated, all messages can be retrieved and be written to log or error files.\n\nA simple approach to interpret such a behaviour is to first create a workflow XML file with\n:func:`~pyroSAR.snap.util.geocode`'s parameter ``test=True`` (so that only the XML is written but it is not 
executed):\n\n.. code-block:: python\n\n    from pyroSAR.snap import geocode\n    geocode(scene='S1A_IW_GRDH_1SDV_20200720T023849_20200720T023914_033532_03E2B5_2952.zip',\n            outdir='/test', test=True)\n\n\nand then run gpt on it directly in the shell (i.e. outside of Python):\n\n::\n\n    gpt /test/S1A__IW___D_20200720T023849_VV_Orb_ML_TC_proc.xml\n\nThis way one can directly see gpt's status, which in this case might be\n\n::\n\n    SEVERE: org.esa.snap.core.dataop.dem.ElevationFile: java.lang.reflect.InvocationTargetException"
  },
  {
    "path": "docs/source/index.rst",
    "content": "###################################\nWelcome to pyroSAR's documentation!\n###################################\n\nGeneral Topics\n==============\n\n.. toctree::\n    :maxdepth: 1\n\n    general/installation\n    general/filenaming\n    general/configuration\n    general/OSV\n    general/DEM\n    general/snap\n    general/processing\n    general/logging\n\nAPI Documentation\n=================\n\n.. toctree::\n    :maxdepth: 1\n\n    api/drivers\n    api/archive\n    api/snap\n    api/gamma/index\n    api/sentinel-1\n    api/auxdata\n    api/datacube\n    api/ancillary\n    api/examine\n\nAbout\n=====\n\n.. toctree::\n    :maxdepth: 1\n\n    about/projects\n    about/changelog\n    about/publications\n    about/references\n\nIndices and tables\n==================\n\n* :ref:`genindex`\n* :ref:`modindex`\n* :ref:`search`\n"
  },
  {
    "path": "docs/source/references.bib",
    "content": "% Encoding: UTF-8\n@article{Ali2018,\n   author = {Ali, I. and Cao, S. and Naeimi, V. and Paulik, C. and Wagner, W.},\n   title = {Methods to Remove the Border Noise From Sentinel-1 Synthetic Aperture Radar Data: Implications and Importance For Time-Series Analysis},\n   journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},\n   volume = {11},\n   number = {3},\n   pages = {777-786},\n   DOI = {10.1109/Jstars.2017.2787650},\n   year = {2018},\n   type = {Journal Article}\n}\n\n@techreport{Miranda2018,\n   author = {Miranda, N. and Hajduch, G.},\n   title = {Masking \"No-value\" Pixels on GRD Products generated by the Sentinel-1 ESA IPF},\n   institution = {CLS},\n   month = {29 January},\n   url = {https://sentinel.esa.int/documents/247904/2142675/Sentinel-1-masking-no-value-pixels-grd-products-note},\n   year = {2018},\n   type = {Report}\n}\n\n@article{Small2011,\n   author = {Small, D.},\n   title = {Flattening Gamma: Radiometric Terrain Correction for SAR Imagery},\n   journal = {IEEE Transactions on Geoscience and Remote Sensing},\n   volume = {49},\n   number = {8},\n   pages = {3081-3093},\n   DOI = {10.1109/Tgrs.2011.2120616},\n   year = {2011},\n   type = {Journal Article}\n}\n\n@inproceedings{Truckenbrodt2019,\n   author = {Truckenbrodt, J. and Cremer, F. and Baris, I. and Eberle, J.},\n   title = {pyroSAR: A Framework for Large-Scale SAR Satellite Data Processing},\n   booktitle = {Big Data from Space},\n   editor = {Soille, P. and Loekken, S. and Albani, S.},\n   address = {Luxembourg},\n   publisher = {Publications Office of the European Union},\n   pages = {197-200},\n   ISBN = {ISBN 978-92-76-00034-1},\n   DOI = {10.2760/848593},\n   year = {2019},\n   type = {Conference Proceedings}\n}\n\n@article{Truckenbrodt2019a,\n   author = {Truckenbrodt, J. and Freemantle, T. and Williams, C. and Jones, T. and Small, D. and Dubois, C. and Thiel, C. and Rossi, C. and Syriou, A. 
and Giuliani, G.},\n   title = {Towards Sentinel-1 SAR Analysis-Ready Data: A Best Practices Assessment on Preparing Backscatter Data for the Cube},\n   journal = {Data},\n   volume = {4},\n   number = {3},\n   ISSN = {2306-5729},\n   DOI = {10.3390/data4030093},\n   year = {2019},\n   type = {Journal Article}\n}\n\n@article{Visvalingam1993,\n   author = {Visvalingam, M. and Whyatt, J. D.},\n   title = {Line Generalization by Repeated Elimination of Points},\n   journal = {Cartographic Journal},\n   volume = {30},\n   number = {1},\n   pages = {46-51},\n   ISSN = {0008-7041},\n   DOI = {10.1179/caj.1993.30.1.46},\n   year = {1993},\n   type = {Journal Article}\n}\n"
  },
  {
    "path": "environment-dev.yml",
    "content": "name: ps_test_dev\nchannels:\n  - conda-forge\ndependencies:\n  - gdal>=2.4\n  - geoalchemy2<0.14.0\n  - libgdal\n  - libspatialite>=5.1.0\n  - lxml\n  - numpy\n  - packaging\n  - pillow\n  - progressbar2\n  - psycopg2\n  - python>=3.10\n  - pyyaml\n  - requests\n  - shapely\n  - spatialist>=0.17.0\n  - sqlalchemy>=1.4,<2.0\n  - sqlalchemy-utils>=0.37,<0.42\n  - coverage\n  - pytest\n  - flake8"
  },
  {
    "path": "environment-doc.yml",
    "content": "name: ps_doc\nchannels:\n  - conda-forge\ndependencies:\n  - python>=3.10\n  - matplotlib\n  - numpy\n  - sphinx\n  - sphinx_rtd_theme\n  - sphinxcontrib-bibtex>=2.2\n  - sphinxcontrib-svg2pdfconverter\n  - cairosvg\n  - sphinx-autodoc-typehints\n"
  },
  {
    "path": "environment.yml",
    "content": "name: ps_test\nchannels:\n  - conda-forge\ndependencies:\n  - gdal>=2.4\n  - geoalchemy2<0.14.0\n  - libgdal\n  - libspatialite>=5.1.0\n  - lxml\n  - numpy\n  - packaging\n  - pillow\n  - progressbar2\n  - psycopg2\n  - python>=3.10\n  - pyyaml\n  - requests\n  - shapely\n  - spatialist>=0.17.0\n  - sqlalchemy>=1.4,<2.0\n  - sqlalchemy-utils>=0.37,<0.42"
  },
  {
    "path": "pyproject.toml",
    "content": "[build-system]\nrequires = [\"setuptools>=45\", \"setuptools_scm[toml]>=6.2\", \"wheel\"]\n\n[project]\nname = \"pyroSAR\"\ndescription = \"a framework for large-scale SAR satellite data processing\"\nrequires-python = \">=3.10\"\nlicense = { file = \"LICENSE.txt\" }\nmaintainers = [\n    { name = \"John Truckenbrodt\", email = \"john.truckenbrodt@dlr.de\" }\n]\nclassifiers = [\n    \"License :: OSI Approved :: MIT License\",\n    \"Operating System :: Microsoft :: Windows\",\n    \"Operating System :: POSIX :: Linux\",\n    \"Programming Language :: Python :: 3\"\n]\ndynamic = [\"version\", \"readme\", \"dependencies\"]\n\n[project.urls]\nrepository = \"https://github.com/johntruckenbrodt/pyroSAR\"\ndocumentation = \"https://pyrosar.readthedocs.io/en/latest/\"\n\n[project.optional-dependencies]\ntest = [\"pytest\"]\ndocs = [\n    \"sphinx\", \"sphinx_rtd_theme\", \"sphinxcontrib-bibtex\",\n    \"sphinxcontrib-svg2pdfconverter\", \"cairosvg\",\n    \"sphinx-autodoc-typehints\"\n]\n\n[tool.setuptools.dynamic]\ndependencies = { file = [\"requirements.txt\"] }\nreadme = { file = [\"README.md\"], content-type = \"text/markdown\" }\n\n[tool.setuptools_scm]\n"
  },
  {
    "path": "pyroSAR/ERS/__init__.py",
    "content": "from .auxil import passdb_create, passdb_query\nfrom .mapping import get_resolution_nesz"
  },
  {
    "path": "pyroSAR/ERS/auxil.py",
    "content": "###############################################################################\n# tools for processing ERS satellite data\n\n# Copyright (c) 2014-2019, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n################################################################################\nimport os\nimport math\nfrom spatialist import sqlite_setup\nfrom spatialist.ancillary import HiddenPrints\nfrom datetime import datetime, timedelta\n\nimport logging\nlog = logging.getLogger(__name__)\n\n\ndef passdb_create(ers1passes, ers2passes, dbname):\n    \"\"\"\n    create a sqlite database from ERS pass tables\n    downloaded from http://www.deos.tudelft.nl/ers/phases/starttimes.html.\n    There you can also find additional information on the file structure and background.\n    The fields `phase`, `cycle`, `pass`, `starttime` and `endtime` are read from the table.\n    The latter two are converted to format YYYY-MM-DD HH:MM:SS.SSS.\n    The fields `cycle` and `pass` are converted to integer.\n    All five fields plus the name of the sensor (`ERS1` or `ERS2`) are then stored to the database.\n\n    Parameters\n    ----------\n    ers1passes: str\n        the name of the ERS-1 pass table\n    ers2passes: str\n        the name of the ERS-2 pass table\n    dbname: str\n        the name of the database to write the results to\n\n    Returns\n    -------\n\n    \"\"\"\n    columns = {'satellite': 'TEXT',\n               'phase': 'TEXT',\n               'cycleNumber': 'INTEGER',\n               'passNumber': 'INTEGER',\n               'starttime': 'TEXT',\n               
'endtime': 'TEXT'}\n    \n    con = sqlite_setup(driver=dbname)\n    \n    create_string = '''CREATE TABLE if not exists data ({})'''.format(\n        ', '.join([' '.join(x) for x in columns.items()]))\n    cursor = con.cursor()\n    cursor.execute(create_string)\n    \n    def time_convert(timestring):\n        dt = datetime(1985, 1, 1) + timedelta(seconds=float(timestring))\n        return dt.strftime('%Y-%m-%d %H:%M:%S.%f')\n    \n    insert_string = '''INSERT INTO data({0}) VALUES({1})''' \\\n        .format(', '.join(columns.keys()),\n                ', '.join(['?'] * len(columns.keys())))\n    \n    for satellite, filename in [('ERS1', ers1passes), ('ERS2', ers2passes)]:\n        with open(filename, 'r') as table:\n            for line in table:\n                phase, cycle, passNumber, starttime, endtime = line.split()[0:5]\n                insertion = [satellite, phase,\n                             int(cycle), int(passNumber),\n                             time_convert(starttime), time_convert(endtime)]\n                if satellite == 'ERS1':\n                    log.info(tuple(insertion))\n                cursor.execute(insert_string, tuple(insertion))\n    con.commit()\n    con.close()\n\n\ndef passdb_query(satellite, acqtime, dbname=None):\n    \"\"\"\n    query the orbit information for an ERS acquisition\n\n    Parameters\n    ----------\n    satellite: {'ERS1', 'ERS2'}\n        the name of the satellite\n    acqtime: datetime.datetime\n        the acquisition of the satellite image\n    dbname: str, None\n        the name of the database as created by :func:`passdb_create`. 
If None, the default database delivered with\n        pyroSAR is used\n\n    Returns\n    -------\n\n    \"\"\"\n    if satellite == 'ERS1':\n        # the last timestamp for which specific ERS-1 orbit information is present,\n        # afterwards that of ERS-2 is used\n        last = datetime.strptime('1996-06-02 21:59:26.618659', '%Y-%m-%d %H:%M:%S.%f')\n        sat = 'ERS2' if acqtime > last else 'ERS1'\n    elif satellite == 'ERS2':\n        sat = 'ERS2'\n    else:\n        raise ValueError(\"satellite must be either 'ERS1' or 'ERS2', was '{}'\".format(satellite))\n    \n    if dbname is None:\n        dbname = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data', 'erspasses.db')\n    with HiddenPrints():\n        con = sqlite_setup(driver=dbname)\n    \n    cursor = con.cursor()\n    acqtime_str = acqtime.strftime('%Y-%m-%d %H:%M:%S.%f')\n    query = '''SELECT * FROM data WHERE satellite = ? AND starttime <= ? AND endtime >= ?'''\n    cursor.execute(query, (sat, acqtime_str, acqtime_str))\n    \n    fetch = cursor.fetchall()\n    if len(fetch) == 0:\n        cursor.execute(query, ('ERS2', acqtime_str, acqtime_str))\n        fetch = cursor.fetchall()\n    \n    result = dict(zip(['satellite', 'phase', 'cycleNumber', 'passNumber'], fetch[0][0:4]))\n    result['satellite'] = satellite\n    result['orbitNumber_rel'] = int(math.ceil(result['passNumber'] / 2.))\n    return result\n"
  },
  {
    "path": "pyroSAR/ERS/mapping.py",
    "content": "from typing import Literal\n\nRESOLUTION_NESZ = {\n    'ERS1': {\n        'IMP': {\n            'IS2': {\n                'res_rg': 25.04,\n                'res_az': 21.51,\n                'nesz_nr': 26.8,\n                'nesz_fr': 26\n            },\n            'std_dev': 20\n        },\n        'IMS': {\n            'IS2': {\n                'res_rg': 5.32,\n                'res_az': 9.66,\n                'nesz_nr': 26.8,\n                'nesz_fr': 26\n            },\n            'std_dev': 20\n        }\n    },\n    'ERS2': {\n        'IMP': {\n            'IS2': {\n                'res_rg': 21.63,\n                'res_az': 25.19,\n                'nesz_nr': 23.1,\n                'nesz_fr': 21.5\n            },\n            'std_dev': 20\n        },\n        'IMS': {\n            'IS2': {\n                'res_rg': 5.33,\n                'res_az': 9.83,\n                'nesz_nr': 23.1,\n                'nesz_fr': 21.5\n            },\n            'std_dev': 20\n        }\n    },\n    'ASAR': {\n        'IMP': {\n            'IS1': {\n                'res_rg': 30.86,\n                'res_az': 22.14,\n                'nesz_nr': 25.1,\n                'nesz_fr': 19.2\n            },\n            'IS2': {\n                'res_rg': 24.90,\n                'res_az': 22.14,\n                'nesz_nr': 21.8,\n                'nesz_fr': 20.5\n            },\n            'IS3': {\n                'res_rg': 24.84,\n                'res_az': 22.14,\n                'nesz_nr': 22.6,\n                'nesz_fr': 20.5\n            },\n            'IS4': {\n                'res_rg': 25.56,\n                'res_az': 22.14,\n                'nesz_nr': 22.3,\n                'nesz_fr': 19.1\n            },\n            'IS5': {\n                'res_rg': 25.73,\n                'res_az': 22.14,\n                'nesz_nr': 21.4,\n                'nesz_fr': 19\n            },\n            'IS6': {\n                'res_rg': 26.15,\n                
'res_az': 22.14,\n                'nesz_nr': 24,\n                'nesz_fr': 21.2\n            },\n            'IS7': {\n                'res_rg': 26.59,\n                'res_az': 22.14,\n                'nesz_nr': 23,\n                'nesz_fr': 20.4\n            },\n            'std_dev': 5\n        },\n        'IMS': {\n            'IS1': {\n                'res_rg': 5.77,\n                'res_az': 8.43,\n                'nesz_nr': 25.1,\n                'nesz_fr': 19.2\n            },\n            'IS2': {\n                'res_rg': 5.77,\n                'res_az': 8.43,\n                'nesz_nr': 21.8,\n                'nesz_fr': 20.5\n            },\n            'IS3': {\n                'res_rg': 5.77,\n                'res_az': 8.43,\n                'nesz_nr': 22.6,\n                'nesz_fr': 20.5\n            },\n            'IS4': {\n                'res_rg': 5.77,\n                'res_az': 8.43,\n                'nesz_nr': 22.3,\n                'nesz_fr': 19.1\n            },\n            'IS5': {\n                'res_rg': 5.77,\n                'res_az': 8.43,\n                'nesz_nr': 21.4,\n                'nesz_fr': 19\n            },\n            'IS6': {\n                'res_rg': 5.77,\n                'res_az': 8.43,\n                'nesz_nr': 24,\n                'nesz_fr': 21.2\n            },\n            'IS7': {\n                'res_rg': 5.77,\n                'res_az': 8.43,\n                'nesz_nr': 23,\n                'nesz_fr': 20.4\n            },\n            'std_dev': 5\n        },\n        'APP': {\n            'IS1': {\n                'res_rg': 31.22,\n                'res_rg_new': 31.22,\n                'res_az': 27.45,\n                'nesz_nr': 25.1,\n                'nesz_fr': 19.2\n            },\n            'IS2': {\n                'res_rg': 25.23,\n                'res_rg_new': 24.10,\n                'res_az': 27.45,\n                'nesz_nr': 21.8,\n                'nesz_fr': 20.5\n            },\n     
       'IS3': {\n                'res_rg': 24.74,\n                'res_rg_new': 24.30,\n                'res_az': 27.45,\n                'nesz_nr': 22.6,\n                'nesz_fr': 20.5\n            },\n            'IS4': {\n                'res_rg': 25.46,\n                'res_rg_new': 25.30,\n                'res_az': 27.45,\n                'nesz_nr': 22.3,\n                'nesz_fr': 19.1\n            },\n            'IS5': {\n                'res_rg': 25.70,\n                'res_rg_new': 25.35,\n                'res_az': 27.45,\n                'nesz_nr': 21.4,\n                'nesz_fr': 19\n            },\n            'IS6': {\n                'res_rg': 26.07,\n                'res_rg_new': 25.90,\n                'res_az': 27.45,\n                'nesz_nr': 24,\n                'nesz_fr': 21.2\n            },\n            'IS7': {\n                'res_rg': 26.53,\n                'res_rg_new': 26.32,\n                'res_az': 27.45,\n                'nesz_nr': 23,\n                'nesz_fr': 20.4\n            },\n            'std_dev': 10\n        },\n        'APS': {\n            'IS1': {\n                'res_rg': 4.3,\n                'res_az': 8.39,\n                'nesz_nr': 25.1,\n                'nesz_fr': 19.2\n            },\n            'IS2': {\n                'res_rg': 4.3,\n                'res_az': 8.39,\n                'nesz_nr': 21.8,\n                'nesz_fr': 20.5\n            },\n            'IS3': {\n                'res_rg': 4.3,\n                'res_az': 8.39,\n                'nesz_nr': 22.6,\n                'nesz_fr': 20.5\n            },\n            'IS4': {\n                'res_rg': 4.3,\n                'res_az': 8.39,\n                'nesz_nr': 22.3,\n                'nesz_fr': 19.1\n            },\n            'IS5': {\n                'res_rg': 4.3,\n                'res_az': 8.39,\n                'nesz_nr': 21.4,\n                'nesz_fr': 19\n            },\n            'IS6': {\n                'res_rg': 
4.3,\n                'res_az': 8.39,\n                'nesz_nr': 24,\n                'nesz_fr': 21.2\n            },\n            'IS7': {\n                'res_rg': 4.3,\n                'res_az': 8.39,\n                'nesz_nr': 23,\n                'nesz_fr': 20.4\n            },\n            'std_dev': 10\n        },\n        'WSM': {\n            'WS': {\n                'res_rg': 150,\n                'res_az': 150,\n                'nesz_nr': 19.5,\n                'nesz_fr': 23.5\n            },\n            'std_dev': 20\n        },\n        'WSS': {\n            'WS': {\n                'res_rg': None,\n                'res_az': None,\n                'nesz_nr': None,\n                'nesz_fr': None\n            },\n            'std_dev': None\n        }\n    }\n}\n\n\ndef get_resolution_nesz(\n        sensor: Literal['ERS1', 'ERS2', 'ASAR'],\n        mode: Literal['APP', 'APS', 'IMP', 'IMS', 'WSM', 'WSS'],\n        swath_id: Literal['IS1', 'IS2', 'IS3', 'IS4', 'IS5', 'IS6', 'IS7', 'WS'],\n        date: str\n) -> tuple[int | float | None, int | float | None, int | float | None, int | float | None]:\n    \"\"\"\n    Get acquisition characteristics not contained in the product metadata:\n\n    - range resolution\n    - azimuth resolution\n    - near range noise equivalent sigma zero (NESZ)\n    - far range NESZ\n    \n    Parameters\n    ----------\n    sensor:\n        the satellite sensor\n    mode:\n        the sensor acquisition mode\n    swath_id:\n        the sensor swath ID\n    date:\n        the acquisition date formatted as YYYYmmdd/YYYYmmddTHHMMSS\n\n    Returns\n    -------\n        the attributes listed above\n    \"\"\"\n    suffix = '_new' if mode == 'APP' and date > '20090528' else ''\n    data = RESOLUTION_NESZ[sensor][mode][swath_id]\n    return (data[f'res_rg{suffix}'], data['res_az'],\n            data['nesz_nr'], data['nesz_fr'])\n"
  },
  {
    "path": "pyroSAR/S1/__init__.py",
    "content": "__author__ = 'john'\n\nfrom .auxil import OSV, removeGRDBorderNoise\n"
  },
  {
    "path": "pyroSAR/S1/auxil.py",
    "content": "###############################################################################\n# general utilities for Sentinel-1\n\n# Copyright (c) 2016-2025, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\nimport os\nimport re\nimport sys\nimport requests\nimport tempfile\nimport zipfile as zf\nfrom io import BytesIO\nfrom datetime import datetime, timedelta\nfrom dateutil import parser as dateutil_parser\nfrom dateutil.relativedelta import relativedelta\nimport xml.etree.ElementTree as ET\nimport numpy as np\nfrom osgeo import gdal\nfrom osgeo.gdalconst import GA_Update\nfrom . 
import linesimplify as ls\nfrom pyroSAR.examine import ExamineSnap\nfrom pyroSAR.ancillary import Lock\nimport progressbar as pb\n\nfrom spatialist.ancillary import finder\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\ntry:\n    import argparse\nexcept ImportError:\n    try:\n        os.remove(os.path.join(os.path.dirname(sys.argv[0]), 'locale.pyc'))\n    finally:\n        import argparse\n\n\ndef init_parser():\n    \"\"\"\n    initialize argument parser for S1 processing utilities\n    \"\"\"\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-t', '--transform', action='store_true', help='transform the final DEM to UTM coordinates')\n    parser.add_argument('-l', '--logfiles', action='store_true', help='create logfiles of the executed GAMMA commands')\n    parser.add_argument('-i', '--intermediates', action='store_true', help='keep intermediate files')\n    parser.add_argument('-q', '--quiet', action='store_true', help='suppress standard console prints')\n    parser.add_argument('-tr', '--targetresolution', default=20, help='the target resolution in meters for x and y',\n                        type=int)\n    parser.add_argument('-fg', '--func_geoback', default=2, help='backward geocoding interpolation function; '\n                                                                 '0 - Nearest Neighbor, 1 - Bicubic Spline, 2 - Bicubic Spline-Log; '\n                                                                 'method 1: negative values possible (e.g. 
in urban areas) - use method 2 to avoid this',\n                        type=int)\n    parser.add_argument('-fi', '--func_interp', default=0,\n                        help='function for interpolation of layover/shadow/foreshortening/DEM gaps; '\n                             '0 - set to 0, 1 - linear interpolation, 2 - actual value, 3 - nn-thinned', type=int)\n    parser.add_argument('-poe', '--poedir', default=None,\n                        help='directory containing aux_poeorb (precise orbit ephemerides) orbit state vector files')\n    parser.add_argument('-res', '--resdir', default=None,\n                        help='directory containing aux_resorb (restituted orbit) orbit state vector files')\n    parser.add_argument('zipfile', help='S1 zipped scene archive to be used')\n    parser.add_argument('tempdir', help='temporary directory for intermediate files')\n    parser.add_argument('outdir', help='output directory')\n    parser.add_argument('srtmdir', help='directory containing SRTM hgt tiles (subdirectories possible)')\n    return parser\n\n\n# todo check existence not by file name but by start and stop time; files are sometimes re-published\nclass OSV(object):\n    \"\"\"\n    interface for management of S1 Orbit State Vector (OSV) files\n\n    input is a directory which is supposed to contain, or already contains, OSV files.\n    Two subdirectories are expected and created otherwise:\n    one for Precise Orbit Ephemerides (POE) named POEORB and one for Restituted Orbit (RES) files named RESORB\n\n    Using method :meth:`match` the corresponding POE (priority) or RES file is returned for a timestamp.\n    Timestamps are always handled in the format YYYYmmddTHHMMSS.\n\n    Parameters\n    ----------\n    osvdir: str\n        the directory to write the orbit files to\n    timeout: int or tuple or None\n        the timeout in seconds for downloading OSV files as provided to :func:`requests.get`\n    \n    See Also\n    --------\n    `requests timeouts 
<https://requests.readthedocs.io/en/master/user/advanced/#timeouts>`_\n    \"\"\"\n    \n    def __init__(self, osvdir=None, timeout=300):\n        self.timeout = timeout\n        if osvdir is None:\n            try:\n                auxdatapath = ExamineSnap().auxdatapath\n            except AttributeError:\n                auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')\n            osvdir = os.path.join(auxdatapath, 'Orbits', 'Sentinel-1')\n        self.outdir_poe = os.path.join(osvdir, 'POEORB')\n        self.outdir_res = os.path.join(osvdir, 'RESORB')\n        self.pattern = r'S1[ABCD]_OPER_AUX_(?:POE|RES)ORB_OPOD_[0-9TV_]{48}\\.EOF'\n        self.pattern_fine = r'(?P<sensor>S1[ABCD])_OPER_AUX_' \\\n                            r'(?P<type>(?:POE|RES)ORB)_OPOD_' \\\n                            r'(?P<publish>[0-9]{8}T[0-9]{6})_V' \\\n                            r'(?P<start>[0-9]{8}T[0-9]{6})_' \\\n                            r'(?P<stop>[0-9]{8}T[0-9]{6})\\.EOF'\n        self._init_dir()\n        self._reorganize()\n    \n    def __enter__(self):\n        return self\n    \n    def __exit__(self, exc_type, exc_val, exc_tb):\n        return\n    \n    def _init_dir(self):\n        \"\"\"\n        create directories if they don't exist yet\n        \"\"\"\n        for dir in [self.outdir_poe, self.outdir_res]:\n            if not os.path.isdir(dir):\n                os.makedirs(dir)\n    \n    def _parse(self, file):\n        basename = os.path.basename(file)\n        groups = re.match(self.pattern_fine, basename).groupdict()\n        return groups\n    \n    def _reorganize(self):\n        \"\"\"\n        compress and move EOF files into subdirectories\n\n        Returns\n        -------\n\n        \"\"\"\n        message = True\n        for subdir in [self.outdir_poe, self.outdir_res]:\n            if not os.path.isdir(subdir):\n                continue\n            files = finder(subdir, [self.pattern], recursive=False, regex=True)\n       
     for eof in files:\n                base = os.path.basename(eof)\n                target = os.path.join(self._subdir(eof), base + '.zip')\n                os.makedirs(os.path.dirname(target), exist_ok=True)\n                if not os.path.isfile(target):\n                    if message:\n                        log.info('compressing and reorganizing EOF files')\n                        message = False\n                    with zf.ZipFile(file=target,\n                                    mode='w',\n                                    compression=zf.ZIP_DEFLATED) as zip:\n                        zip.write(filename=eof,\n                                  arcname=base)\n                os.remove(eof)\n    \n    def _typeEvaluate(self, osvtype):\n        \"\"\"\n        evaluate the 'osvtype' method argument and return the corresponding local directory\n\n        Parameters\n        ----------\n        osvtype: str\n            the type of orbit files required; either 'POE' or 'RES'\n\n        Returns\n        -------\n        str\n            the local directory of the osv type\n        \"\"\"\n        if osvtype not in ['POE', 'RES']:\n            raise IOError('type must be either \"POE\" or \"RES\"')\n        if osvtype == 'POE':\n            return self.outdir_poe\n        else:\n            return self.outdir_res\n    \n    def __catch_aux_sentinel(self, sensor, start, stop, osvtype='POE'):\n        url = 'http://aux.sentinel1.eo.esa.int'\n        skeleton = '{url}/{osvtype}ORB/{year}/{month:02d}/{day:02d}/'\n        \n        files = []\n        date_search = start\n        busy = True\n        while busy:\n            url_sub = skeleton.format(url=url,\n                                      osvtype=osvtype,\n                                      year=date_search.year,\n                                      month=date_search.month,\n                                      day=date_search.day)\n            
response = requests.get(url_sub, timeout=self.timeout)\n            response.raise_for_status()\n            result = response.text\n            files_sub = list(set(re.findall(self.pattern, result)))\n            if len(files_sub) == 0:\n                break\n            for file in files_sub:\n                match = re.match(self.pattern_fine, file)\n                start2 = datetime.strptime(match.group('start'), '%Y%m%dT%H%M%S')\n                stop2 = datetime.strptime(match.group('stop'), '%Y%m%dT%H%M%S')\n                if sensor == match.group('sensor'):\n                    if start2 < stop and stop2 > start:\n                        log.info(url_sub)\n                        files.append({'filename': file,\n                                      'href': url_sub + '/' + file,\n                                      'auth': None})\n                if start2 >= stop:\n                    busy = False\n            date_search += timedelta(days=1)\n        \n        return files\n    \n    def __catch_step_auxdata(self, sensor, start, stop, osvtype='POE'):\n        url = 'https://step.esa.int/auxdata/orbits/Sentinel-1'\n        skeleton = '{url}/{osvtype}ORB/{sensor}/{year}/{month:02d}/'\n        \n        if osvtype not in ['POE', 'RES']:\n            raise RuntimeError(\"osvtype must be either 'POE' or 'RES'\")\n        \n        if isinstance(sensor, str):\n            sensor = [sensor]\n        \n        files = []\n        date_search_final = datetime(year=stop.year, month=stop.month, day=1)\n        for sens in sensor:\n            date_search = datetime(year=start.year,\n                                   month=start.month,\n                                   day=1)\n            date_search -= relativedelta(months=1)\n            busy = True\n            while busy:\n                url_sub = skeleton.format(url=url,\n                                          osvtype=osvtype,\n                                          sensor=sens,\n                    
                      year=date_search.year,\n                                          month=date_search.month)\n                log.info(url_sub)\n                response = requests.get(url_sub, timeout=self.timeout)\n                if response.status_code != 404:\n                    response.raise_for_status()\n                    result = response.text\n                    files_sub = list(set(re.findall(self.pattern, result)))\n                    for file in files_sub:\n                        match = re.match(self.pattern_fine, file)\n                        start2 = datetime.strptime(match.group('start'), '%Y%m%dT%H%M%S')\n                        stop2 = datetime.strptime(match.group('stop'), '%Y%m%dT%H%M%S')\n                        if start2 < stop and stop2 > start:\n                            files.append({'filename': file,\n                                          'href': url_sub + '/' + file + '.zip',\n                                          'auth': None})\n                if date_search == date_search_final:\n                    busy = False\n                date_search += relativedelta(months=1)\n                if date_search > datetime.now():\n                    busy = False\n        return files\n    \n    def __catch_gnss(self, sensor, start, stop, osvtype='POE'):\n        url = 'https://scihub.copernicus.eu/gnss'\n        redirect = 'https://dhusfeed.dhus.onda-dias.net/gnss'\n        auth = ('gnssguest', 'gnssguest')\n        # a dictionary for storing the url arguments\n        query = {}\n        \n        if osvtype == 'POE':\n            query['producttype'] = 'AUX_POEORB'\n        elif osvtype == 'RES':\n            query['producttype'] = 'AUX_RESORB'\n        else:\n            raise RuntimeError(\"osvtype must be either 'POE' or 'RES'\")\n        \n        if sensor in ['S1A', 'S1B', 'S1C', 'S1D']:\n            query['platformname'] = 'Sentinel-1'\n            # filename starts w/ sensor\n            query['filename'] = 
'{}*'.format(sensor)\n        elif sorted(sensor) == ['S1A', 'S1B', 'S1C', 'S1D']:\n            query['platformname'] = 'Sentinel-1'\n        else:\n            raise RuntimeError('unsupported input for parameter sensor')\n        \n        # the collection of files to be returned\n        collection = []\n        \n        date_start = start.strftime('%Y-%m-%dT%H:%M:%SZ')\n        date_stop = stop.strftime('%Y-%m-%dT%H:%M:%SZ')\n        \n        # append the time frame to the query dictionary\n        query['beginPosition'] = '[{} TO {}]'.format(date_start, date_stop)\n        query['endPosition'] = '[{} TO {}]'.format(date_start, date_stop)\n        query_list = []\n        for keyword, value in query.items():\n            query_elem = '{}:{}'.format(keyword, value)\n            query_list.append(query_elem)\n        query_str = ' '.join(query_list)\n        target = '{}/search?q={}&format=json'.format(url, query_str)\n        log.info(target)\n        \n        def _parse_gnsssearch_json(search_dict):\n            parsed_dict = {}\n            # Will return ['entry'] as dict if only one item\n            # If so just make a list\n            if isinstance(search_dict, dict):\n                search_dict = [search_dict]\n            for entry in search_dict:\n                id = entry['id']\n                entry_dict = {}\n                \n                for key, value in entry.items():\n                    if key == 'title':\n                        entry_dict[key] = value\n                    elif key == 'id':\n                        entry_dict[key] = value\n                    elif key == 'ondemand':\n                        if value.lower() == 'true':\n                            entry_dict[key] = True\n                        else:\n                            entry_dict[key] = False\n                    elif key == 'str':\n                        for elem in value:\n                            entry_dict[elem['name']] = elem['content']\n               
     elif key == 'link':\n                        for elem in value:\n                            if 'rel' in elem.keys():\n                                href_key = 'href_' + elem['rel']\n                                entry_dict[href_key] = elem['href']\n                            else:\n                                entry_dict['href'] = elem['href']\n                    elif key == 'date':\n                        for elem in value:\n                            entry_dict[elem['name']] = dateutil_parser.parse(elem['content'])\n                \n                parsed_dict[id] = entry_dict\n            return parsed_dict\n        \n        def _parse_gnsssearch_response(response_json):\n            if 'entry' in response_json.keys():\n                search_dict = response_json['entry']\n                parsed_dict = _parse_gnsssearch_json(search_dict)\n            else:\n                parsed_dict = {}\n            return parsed_dict\n        \n        response = requests.get(target, auth=auth, timeout=self.timeout)\n        response.raise_for_status()\n        response_json = response.json()['feed']\n        total_results = response_json['opensearch:totalResults']\n        subquery = [link['href'] for link in response_json['link'] if link['rel'] == 'self'][0]\n        subquery = subquery.replace(redirect, url.strip())\n        if int(total_results) > 10:\n            subquery = subquery.replace('rows=10', 'rows=100')\n        while subquery:\n            subquery_response = requests.get(subquery, auth=auth, timeout=self.timeout)\n            subquery_response.raise_for_status()\n            subquery_json = subquery_response.json()['feed']\n            subquery_products = _parse_gnsssearch_response(subquery_json)\n            items = list(subquery_products.values())\n            for item in items:\n                item['auth'] = auth\n            collection += list(subquery_products.values())\n            if 'next' in [link['rel'] for link in 
subquery_json['link']]:\n                subquery = [link['href'] for link in subquery_json['link'] if link['rel'] == 'next'][0]\n                subquery = subquery.replace(redirect, url.strip())\n            else:\n                subquery = None\n        if osvtype == 'RES' and self.maxdate('POE', 'stop') is not None:\n            collection = [x for x in collection\n                          if self.date(x['filename'], 'start') > self.maxdate('POE', 'stop')]\n        for item in collection:\n            item['href'] = item['href'].replace(redirect, url)\n        return collection\n    \n    def catch(self, sensor, osvtype='POE', start=None, stop=None, url_option=1):\n        \"\"\"\n        check a server for files\n\n        Parameters\n        ----------\n        sensor: str or list[str]\n            The S1 mission(s):\n            \n             - 'S1A'\n             - 'S1B'\n             - 'S1C'\n             - 'S1D'\n        osvtype: str\n            the type of orbit files required\n        start: str or None\n            the date to start searching for files in format YYYYmmddTHHMMSS\n        stop: str or None\n            the date to stop searching for files in format YYYYmmddTHHMMSS\n        url_option: int\n            the OSV download URL option\n            \n             - 1: https://step.esa.int/auxdata/orbits/Sentinel-1\n\n        Returns\n        -------\n        list[dict]\n            the product dictionary of the remote OSV files, with href\n        \"\"\"\n        \n        log.info('searching for new {} files'.format(osvtype))\n        \n        if start is not None:\n            start = datetime.strptime(start, '%Y%m%dT%H%M%S')\n        else:\n            start = datetime.strptime('2014-07-31', '%Y-%m-%d')\n        # set the defined date or the current date otherwise\n        if stop is not None:\n            stop = datetime.strptime(stop, '%Y%m%dT%H%M%S')\n        else:\n            stop = datetime.now()\n        \n        if url_option 
== 1:\n            items = self.__catch_step_auxdata(sensor, start, stop, osvtype)\n        else:\n            raise ValueError(\"unknown URL option\")\n        \n        if osvtype == 'RES' and self.maxdate('POE', 'stop') is not None:\n            items = [x for x in items\n                     if self.date(x['filename'], 'start') > self.maxdate('POE', 'stop')]\n        log.info('found {} results'.format(len(items)))\n        \n        return items\n    \n    def date(self, file, datetype):\n        \"\"\"\n        extract a date from an OSV file name\n\n        Parameters\n        ----------\n        file: str\n            the OSV file\n        datetype: {'publish', 'start', 'stop'}\n            one of three possible date types contained in the OSV filename\n\n        Returns\n        -------\n        str\n            a time stamp in the format YYYYmmddTHHMMSS\n        \"\"\"\n        return self._parse(file)[datetype]\n    \n    def clean_res(self):\n        \"\"\"\n        delete all RES files for whose date a POE file exists\n        \"\"\"\n        maxdate_poe = self.maxdate('POE', 'stop')\n        if maxdate_poe is not None:\n            deprecated = [x for x in self.getLocals('RES') if self.date(x, 'stop') < maxdate_poe]\n            log.info('deleting {} RES file{}'.format(len(deprecated), '' if len(deprecated) == 1 else 's'))\n            for item in deprecated:\n                os.remove(item)\n    \n    def getLocals(self, osvtype='POE'):\n        \"\"\"\n        get a list of local files of specific type\n\n        Parameters\n        ----------\n        osvtype: {'POE', 'RES'}\n            the type of orbit files required\n\n        Returns\n        -------\n        list[str]\n            a selection of local OSV files\n        \"\"\"\n        directory = self._typeEvaluate(osvtype)\n        return finder(directory, [self.pattern], regex=True)\n    \n    def maxdate(self, osvtype='POE', datetype='stop'):\n        \"\"\"\n        return the latest date 
of locally existing POE/RES files\n\n        Parameters\n        ----------\n        osvtype: {'POE', 'RES'}\n            the type of orbit files required\n        datetype: {'publish', 'start', 'stop'}\n            one of three possible date types contained in the OSV filename\n\n        Returns\n        -------\n        str\n            a timestamp in format YYYYmmddTHHMMSS\n        \"\"\"\n        directory = self._typeEvaluate(osvtype)\n        files = finder(directory, [self.pattern], regex=True)\n        return max([self.date(x, datetype) for x in files]) if len(files) > 0 else None\n    \n    def mindate(self, osvtype='POE', datetype='start'):\n        \"\"\"\n        return the earliest date of locally existing POE/RES files\n\n        Parameters\n        ----------\n        osvtype: {'POE', 'RES'}\n            the type of orbit files required\n        datetype: {'publish', 'start', 'stop'}\n            one of three possible date types contained in the OSV filename\n\n        Returns\n        -------\n        str\n            a timestamp in format YYYYmmddTHHMMSS\n        \"\"\"\n        directory = self._typeEvaluate(osvtype)\n        files = finder(directory, [self.pattern], regex=True)\n        return min([self.date(x, datetype) for x in files]) if len(files) > 0 else None\n    \n    def match(self, sensor, timestamp, osvtype='POE'):\n        \"\"\"\n        return the corresponding OSV file for the provided sensor and time stamp.\n        The file returned is one which covers the acquisition time and, if multiple exist,\n        the one which was published last.\n        In case a list of options is provided as osvtype, the file of higher accuracy (i.e. 
POE over RES) is returned.\n\n        Parameters\n        ----------\n        sensor: str\n            The S1 mission:\n            \n             - 'S1A'\n             - 'S1B'\n             - 'S1C'\n             - 'S1D'\n        timestamp: str\n            the time stamp in the format 'YYYYmmddTHHMMSS'\n        osvtype: str or list[str]\n            the type of orbit files required; either 'POE', 'RES' or a list of both\n\n        Returns\n        -------\n        str\n            the best matching orbit file (overlapping time plus latest publication date)\n        \"\"\"\n        # list all locally existing files of the defined type\n        if osvtype in ['POE', 'RES']:\n            locals = self.getLocals(osvtype)\n            # filter the files to those which contain data for the defined time stamp\n            files = [x for x in locals if self.date(x, 'start') <= timestamp <= self.date(x, 'stop')]\n            files = [x for x in files if os.path.basename(x).startswith(sensor)]\n            if len(files) > 0:\n                # select the file which was published last\n                best = self.sortByDate(files, 'publish')[-1]\n                return best\n            elif len(files) == 1:\n                return files[0]\n            return None\n        elif sorted(osvtype) == ['POE', 'RES']:\n            best = self.match(sensor=sensor, timestamp=timestamp, osvtype='POE')\n            if not best:\n                best = self.match(sensor=sensor, timestamp=timestamp, osvtype='RES')\n            return best\n    \n    def retrieve(self, products, pbar=False):\n        \"\"\"\n        download a list of product dictionaries into the respective subdirectories, i.e. 
POEORB or RESORB\n\n        Parameters\n        ----------\n        products: list[dict]\n            a list of remotely existing OSV product dictionaries as returned by method :meth:`catch`\n        pbar: bool\n            add a progressbar?\n\n        Returns\n        -------\n        \"\"\"\n        downloads = []\n        for product in products:\n            if all(key not in ['filename', 'href'] for key in product.keys()):\n                raise RuntimeError(\"product dictionaries must contain 'filename' and 'href' keys\")\n            basename = product['filename']\n            remote = product['href']\n            auth = product['auth']\n            \n            outdir = self._subdir(basename)\n            os.makedirs(outdir, exist_ok=True)\n            local = os.path.join(outdir, basename) + '.zip'\n            if not os.path.isfile(local):\n                downloads.append((remote, local, basename, auth))\n        if len(downloads) == 0:\n            return\n        log.info('downloading {} file{}'.format(len(downloads), '' if len(downloads) == 1 else 's'))\n        if pbar:\n            progress = pb.ProgressBar(max_value=len(downloads))\n        else:\n            progress = None\n        i = 0\n        for remote, local, basename, auth in downloads:\n            with Lock(local):\n                if not os.path.isfile(local):\n                    response = requests.get(remote, auth=auth, timeout=self.timeout)\n                    response.raise_for_status()\n                    infile = response.content\n                    try:\n                        if remote.endswith('.zip'):\n                            with zf.ZipFile(file=BytesIO(infile)) as tmp:\n                                members = tmp.namelist()\n                                target = [x for x in members if re.search(basename, x)][0]\n                                with zf.ZipFile(local, 'w') as outfile:\n                                    
outfile.writestr(data=tmp.read(target),\n                                                     zinfo_or_arcname=basename)\n                        else:\n                            with zf.ZipFile(\n                                    file=local,\n                                    mode='w',\n                                    compression=zf.ZIP_DEFLATED\n                            ) as outfile:\n                                outfile.writestr(zinfo_or_arcname=basename,\n                                                 data=infile)\n                    except Exception as e:\n                        os.remove(local)\n                        raise\n            if pbar:\n                i += 1\n                progress.update(i)\n        if pbar:\n            progress.finish()\n        self.clean_res()\n    \n    def sortByDate(self, files, datetype='start'):\n        \"\"\"\n        sort a list of OSV files by a specific date type\n\n        Parameters\n        ----------\n        files: list[str]\n            some OSV files\n        datetype: {'publish', 'start', 'stop'}\n            one of three possible date types contained in the OSV filename\n\n        Returns\n        -------\n        list[str]\n            the input OSV files sorted by the defined date\n        \"\"\"\n        return sorted(files, key=lambda x: self.date(x, datetype))\n    \n    def _subdir(self, file):\n        \"\"\"\n        | return the subdirectory in which to store the EOF file,\n        | i.e. basedir/{type}ORB/{sensor}/{year}/{month}\n        | e.g. 
basedir/POEORB/S1A/2018/12\n\n        Parameters\n        ----------\n        file: str\n            the EOF filename\n\n        Returns\n        -------\n        str\n            the target directory\n        \"\"\"\n        attr = self._parse(file)\n        outdir = self._typeEvaluate(attr['type'][:3])\n        start = self.date(file, datetype='start')\n        start = datetime.strptime(start, '%Y%m%dT%H%M%S')\n        month = '{:02d}'.format(start.month)\n        outdir = os.path.join(outdir, attr['sensor'],\n                              str(start.year), month)\n        return outdir\n\n\ndef removeGRDBorderNoise(scene, method='pyroSAR'):\n    \"\"\"\n    Mask out Sentinel-1 image border noise. This function implements the method for removing GRD border noise as\n    published by ESA :cite:`Miranda2018` and implemented in SNAP and additionally adds further refinement of the result using an image\n    border line simplification approach. In this approach the border between valid and invalid pixels is first\n    simplified using the poly-line vertex reduction method by Visvalingam and Whyatt :cite:`Visvalingam1993`.\n    The line segments of the new border are then shifted until all pixels considered invalid before the simplification\n    are again on one side of the line. See image below for further clarification.\n\n    Parameters\n    ----------\n    scene: pyroSAR.drivers.SAFE\n        the Sentinel-1 scene object\n    method: str\n        the border noise removal method to be applied; one of the following:\n        \n         - 'ESA': the pure implementation as described by ESA\n         - 'pyroSAR': the ESA method plus the custom pyroSAR refinement\n\n\n    .. figure:: figures/S1_bnr.png\n        :scale: 30%\n\n        Demonstration of the border noise removal for a vertical left image border. The area under the respective lines\n        covers pixels considered valid, everything above will be masked out. 
The blue line is the result of the noise\n        removal as recommended by ESA, in which a lot of noise is still present. The red line is the over-simplified\n        result using the Visvalingam-Whyatt method. The green line is the final result after further correcting the\n        VW-simplified result.\n\n    \"\"\"\n    if scene.product != 'GRD':\n        raise RuntimeError('this method is intended for GRD only')\n    \n    if scene.compression is not None:\n        raise RuntimeError('scene is not yet unpacked')\n    \n    if method not in ['pyroSAR', 'ESA']:\n        raise AttributeError(\"parameter 'method' must be either 'pyroSAR' or 'ESA'\")\n    \n    blocksize = 2000\n    \n    # compute noise scaling factor\n    if scene.meta['IPF_version'] >= 2.9:\n        log.info('border noise removal not necessary for IPF version {}'.format(scene.meta['IPF_version']))\n        return\n    elif scene.meta['IPF_version'] <= 2.5:\n        knoise = {'IW': 75088.7, 'EW': 56065.87}[scene.acquisition_mode]\n        cads = scene.getFileObj(scene.findfiles('calibration-s1[ab]-[ie]w-grd-(?:hh|vv)')[0])\n        caltree = ET.fromstring(cads.read())\n        cads.close()\n        adn = float(caltree.find('.//calibrationVector/dn').text.split()[0])\n        if scene.meta['IPF_version'] < 2.34:\n            scalingFactor = knoise * adn\n        else:\n            scalingFactor = knoise * adn * adn\n    else:\n        scalingFactor = 1\n    \n    # read noise vectors from corresponding annotation xml\n    noisefile = scene.getFileObj(scene.findfiles('noise-s1[ab]-[ie]w-grd-(?:hh|vv)')[0])\n    noisetree = ET.fromstring(noisefile.read())\n    noisefile.close()\n    noiseVectors = noisetree.findall('.//noiseVector')\n    \n    # define boundaries of image subsets to be masked (4x the first lines/samples of the image boundaries)\n    subsets = [(0, 0, blocksize, scene.lines),\n               (0, 0, scene.samples, blocksize),\n               (scene.samples - blocksize, 0, 
scene.samples, scene.lines),\n               (0, scene.lines - blocksize, scene.samples, scene.lines)]\n    \n    # extract column indices of noise vectors\n    yi = np.array([int(x.find('line').text) for x in noiseVectors])\n    \n    # create links to the tif files for a master co-polarization and all other polarizations as slaves\n    master = scene.findfiles('s1.*(?:vv|hh).*tiff')[0]\n    ras_master = gdal.Open(master, GA_Update)\n    ras_slaves = [gdal.Open(x, GA_Update) for x in scene.findfiles('s1.*tiff') if x != master]\n    \n    outband_master = ras_master.GetRasterBand(1)\n    outband_slaves = [x.GetRasterBand(1) for x in ras_slaves]\n    \n    # iterate over the four image subsets\n    for subset in subsets:\n        log.info(subset)\n        xmin, ymin, xmax, ymax = subset\n        xdiff = xmax - xmin\n        ydiff = ymax - ymin\n        # linear interpolation of noise vectors to array\n        noise_interp = np.empty((ydiff, xdiff), dtype=float)\n        for i in range(0, len(noiseVectors)):\n            if ymin <= yi[i] <= ymax:\n                # extract row indices of noise vector\n                xi = [int(x) for x in noiseVectors[i].find('pixel').text.split()]\n                # extract noise values\n                noise = [float(x) for x in noiseVectors[i].find('noiseLut').text.split()]\n                # interpolate values along rows\n                noise_interp[yi[i] - ymin, :] = np.interp(range(0, xdiff), xi, noise)\n        for i in range(0, xdiff):\n            yi_t = yi[(ymin <= yi) & (yi <= ymax)] - ymin\n            # interpolate values along columns\n            noise_interp[:, i] = np.interp(range(0, ydiff), yi_t, noise_interp[:, i][yi_t])\n        \n        # read subset of image to array and subtract interpolated noise (denoising)\n        mat_master = outband_master.ReadAsArray(*[xmin, ymin, xdiff, ydiff])\n        denoisedBlock = mat_master.astype(float) ** 2 - noise_interp * scalingFactor\n        # mask out all pixels with a 
value below 0.5 in the denoised block or 30 in the original block\n        denoisedBlock[(denoisedBlock < 0.5) | (mat_master < 30)] = 0\n        denoisedBlock = np.sqrt(denoisedBlock)\n        \n        if method == 'pyroSAR':\n            # helper functions for masking out negative values\n            def helper1(x):\n                return len(x) - np.argmax(x > 0)\n            \n            def helper2(x):\n                return len(x) - np.argmax(x[::-1] > 0)\n            \n            # mask out negative values and simplify borders (custom implementation)\n            if subset == (0, 0, blocksize, scene.lines):\n                border = np.apply_along_axis(helper1, 1, denoisedBlock)\n                border = blocksize - ls.reduce(border)\n                for j in range(0, ydiff):\n                    denoisedBlock[j, :border[j]] = 0\n                    denoisedBlock[j, border[j]:] = 1\n            elif subset == (0, scene.lines - blocksize, scene.samples, scene.lines):\n                border = np.apply_along_axis(helper2, 0, denoisedBlock)\n                border = ls.reduce(border)\n                for j in range(0, xdiff):\n                    denoisedBlock[border[j]:, j] = 0\n                    denoisedBlock[:border[j], j] = 1\n            elif subset == (scene.samples - blocksize, 0, scene.samples, scene.lines):\n                border = np.apply_along_axis(helper2, 1, denoisedBlock)\n                border = ls.reduce(border)\n                for j in range(0, ydiff):\n                    denoisedBlock[j, border[j]:] = 0\n                    denoisedBlock[j, :border[j]] = 1\n            elif subset == (0, 0, scene.samples, blocksize):\n                border = np.apply_along_axis(helper1, 0, denoisedBlock)\n                border = blocksize - ls.reduce(border)\n                for j in range(0, xdiff):\n                    denoisedBlock[:border[j], j] = 0\n                    denoisedBlock[border[j]:, j] = 1\n        \n        
mat_master[denoisedBlock == 0] = 0\n        # write modified array back to original file\n        outband_master.WriteArray(mat_master, xmin, ymin)\n        outband_master.FlushCache()\n        # perform reading, masking and writing for all other polarizations\n        for outband in outband_slaves:\n            mat = outband.ReadAsArray(*[xmin, ymin, xdiff, ydiff])\n            mat[denoisedBlock == 0] = 0\n            outband.WriteArray(mat, xmin, ymin)\n            outband.FlushCache()\n    # detach file links\n    outband_master = None\n    ras_master = None\n    for outband in outband_slaves:\n        outband = None\n    for ras in ras_slaves:\n        ras = None\n"
  },
  {
    "path": "pyroSAR/S1/linesimplify.py",
    "content": "###############################################################################\n# Utilities for simplification of lines used by pyroSAR for border noise removal\n\n# Copyright (c) 2017-2020, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\nfrom osgeo import ogr\nimport numpy as np\nfrom spatialist.ancillary import rescale\nfrom .polysimplify import VWSimplifier\n\n\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom matplotlib.patches import Polygon\nfrom matplotlib.collections import PatchCollection\nmatplotlib.rcParams['font.size'] = 12\n\n\ndef simplify(x, y, maxpoints=20):\n    x = list(map(float, x))\n    y = list(map(float, y))\n    pts = np.array(list(zip(x, y)))\n    simplifier = VWSimplifier(pts)\n    sqd = []\n    iter_range = range(2, maxpoints + 1)\n    for i in iter_range:\n        VWpts = simplifier.from_number(i)\n        xn, yn = zip(*VWpts)\n        out = np.sum((y - np.interp(x, xn, yn)) ** 2)\n        sqd.append(out)\n    # sqd /= max(sqd)\n    if min(sqd) == max(sqd):\n        VWpts = simplifier.from_number(2)\n        return VWpts\n    else:\n        sqd = rescale(sqd)\n        # plt.plot(sqd)\n        # plt.show()\n        # iter = (np.array(iter_range) - 2) / (maxpoints - 2.)\n        # plt.plot(iter_range, sqd, label='residual')\n        # plt.plot(iter_range, iter, color='r', label='iteration')\n        # plt.plot(iter_range, iter + sqd, color='g', label='residual+iteration')\n        # plt.legend(loc='upper center', shadow=True)\n        # 
plt.show()\n        # npoints = np.argmin(iter + sqd) + 2\n        npoints = np.argmax(np.array(sqd) < 0.01) + 2\n        VWpts = simplifier.from_number(npoints)\n        return VWpts\n\n\ndef createPoly(xn, yn, xmax, ymax, plot=False):\n    \"\"\"\n    create an OGR geometry from a sequence of indices\n    \n    Parameters\n    ----------\n    xn: numpy.ndarray\n        the x indices of the points\n    yn: numpy.ndarray\n        the y  indices of the points\n    xmax: int or float\n        the maximum x index value\n    ymax: int or float\n        the maximum y index value\n\n    Returns\n    -------\n    osgeo.ogr.Geometry\n    \"\"\"\n    ring = ogr.Geometry(ogr.wkbLinearRing)\n    ring.AddPoint_2D(0, 0)\n    for item in zip(xn, yn):\n        item = list(map(int, item))\n        if item != [0, 0] and item != [xmax, ymax]:\n            ring.AddPoint_2D(item[0], item[1])\n    ring.AddPoint_2D(xmax, ymax)\n    ring.AddPoint_2D(xmax, 0)\n    ring.CloseRings()\n    poly = ogr.Geometry(ogr.wkbPolygon)\n    poly.AddGeometry(ring)\n    if plot:\n        fig, ax = plt.subplots()\n        pts = ring.GetPoints()\n        arr = np.array(pts)\n        polygon = Polygon(arr, True)\n        p = PatchCollection([polygon], cmap=matplotlib.cm.jet, alpha=0.4)\n        ax.add_collection(p)\n        ax.autoscale_view()\n        plt.scatter(arr[:, 0], arr[:, 1], s=10, color='red')\n        plt.show()\n    return poly\n\n\ndef reduce(seq, maxpoints=20, straighten=False, plot=False):\n    \"\"\"\n    reduce the complexity of a line; the following steps are performed:\n     - simplify the line using the Visvalingam-Whyatt method\n     - iteratively add points on the original line back to the simplified line\n       until the polygon spanned by the simplified line and (xmin, ymin) does not\n       contain any further points of the original line; the polygon area is\n       expected to only cover valid pixels of the image\n     - optionally further straighten the result for smoother 
edges\n    \n    Parameters\n    ----------\n    seq: numpy.ndarray\n        the 1D line sequence to be simplified\n    maxpoints: int\n        the maximum number points in the simplified sequence\n    straighten: bool\n        perform additional straightening on the simplified line?\n    plot: bool\n        plot the results?\n    \n    Returns\n    -------\n    numpy.ndarray\n        the simplified line sequence\n    \"\"\"\n    if min(seq) == max(seq):\n        return np.array(seq)\n    x = list(range(0, len(seq)))\n    if plot:\n        plt.plot(seq, label='ESA-corrected')\n    # simplify the sequence using the Visvalingam-Whyatt algorithm\n    VWpts = simplify(x, seq, maxpoints)\n    xn, yn = [list(x) for x in zip(*VWpts)]\n    if plot:\n        plt.plot(xn, yn, linewidth=2, color='r', label='VW-simplified')\n    simple = np.interp(x, xn, yn)\n    # create a list of OGR points for the original border\n    points = []\n    for xi, yi in enumerate(seq):\n        point = ogr.Geometry(ogr.wkbPoint)\n        point.AddPoint(int(xi), int(yi))\n        points.append(point)\n    points = np.array(points)\n    while True:\n        # create a polygon containing all pixels inside the simplified border\n        # i.e., containing the area considered valid\n        poly = createPoly(xn, yn, seq.size, int(max(seq)))\n        # create an OGR line from the simplified border points\n        line = ogr.Geometry(ogr.wkbLineString)\n        for xi, yi in zip(xn, yn):\n            line.AddPoint(xi, yi)\n        # compute the distance of each original point to the simplified line\n        dists = np.array([line.Distance(point) for point in points])\n        # check which points are inside of the polygon\n        contain = np.array([point.Within(poly) for point in points])\n        # remove points outside the polygon and stop if\n        # no further points outside the polygon exist\n        dists[~contain] = 0\n        points = points[(dists > 0)]\n        dists = dists[(dists > 
0)]\n        if len(dists) == 0:\n            break\n        # select the point with the largest distance to the simplified\n        # line and add it to the list of simplified points\n        # this reduces the size of the polygon an thus the area considered valid\n        candidate = points[np.argmax(dists)]\n        cp = candidate.GetPoint()\n        index = np.argmin(np.array(xn) < cp[0])\n        xn.insert(index, cp[0])\n        yn.insert(index, cp[1])\n    if plot:\n        plt.plot(xn, yn, linewidth=2, color='limegreen', label='corrected')\n    \n    # further straighten the line segments\n    # def straight(xn, yn, VWpts):\n    #     indices = [i for i in range(0, len(xn)) if (xn[i], yn[i]) in VWpts]\n    #     log.info(indices)\n    #     for i, j in enumerate(indices):\n    #         if i < (len(indices) - 1):\n    #             if indices[i + 1] > j + 1:\n    #                 dx = abs(xn[j] - xn[indices[i + 1]])\n    #                 dy = abs(yn[j] - yn[indices[i + 1]])\n    #                 if dx > dy:\n    #                     seg_y = yn[j:indices[i + 1] + 1]\n    #                     for k in range(j, indices[i + 1] + 1):\n    #                         yn[k] = min(seg_y)\n    #     return yn\n    \n    def straight(xn, yn, VWpts):\n        indices = [i for i in range(0, len(xn)) if (xn[i], yn[i]) in VWpts]\n        xn_new = []\n        yn_new = []\n        # make all line segments horizontal or vertical\n        for index in range(len(indices) - 1):\n            i = indices[index]\n            j = indices[index + 1]\n            ymin = min(yn[i:j + 1])\n            xn_new.extend([xn[i], xn[j]])\n            yn_new.extend([ymin, ymin])\n        # shift horizontal lines down if the preceding horizontal line has a lower y value\n        # but only if the shift is less than the tolerance\n        tolerance = 15\n        for i in range(len(xn_new) - 2):\n            if yn_new[i] == yn_new[i + 1]:\n                if yn_new[i] < yn_new[i + 2] and 
abs(yn_new[i] - yn_new[i + 2]) < tolerance:\n                    yn_new[i + 2] = yn_new[i]\n                    yn_new[i + 3] = yn_new[i]\n                elif (yn_new[i] > yn_new[i + 2]) \\\n                        and (yn_new[i + 2] == yn_new[i + 3]) \\\n                        and abs(yn_new[i] - yn_new[i + 2]) < tolerance:\n                    yn_new[i] = yn_new[i + 2]\n                    yn_new[i + 1] = yn_new[i + 2]\n        return xn_new, yn_new\n    \n    if straighten:\n        xn, yn = straight(xn, yn, VWpts)\n        if plot:\n            plt.plot(xn, yn, linewidth=2, color='m', label='straightened')\n    if plot:\n        plt.legend()\n        plt.xlabel('row')\n        plt.ylabel('column')\n        plt.show()\n    return np.interp(x, xn, yn).astype(int)\n"
  },
  {
    "path": "pyroSAR/S1/polysimplify.py",
    "content": "\"\"\"\nVisvalingam-Whyatt method of poly-line vertex reduction\nVisvalingam, M and Whyatt J D (1993)\n\"Line Generalisation by Repeated Elimination of Points\", Cartographic J., 30 (1), 46 - 51\nDescribed here:\nhttps://web.archive.org/web/20100428020453/http://www2.dcs.hull.ac.uk/CISRG/publications/DPs/DP10/DP10.html\n=========================================\nThe MIT License (MIT)\nCopyright (c) 2014 Elliot Hallmark\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n================================\n\ncode was obtained from https://github.com/Permafacture/Py-Visvalingam-Whyatt/blob/master/polysimplify.py\nminor edits for Python3 compatibility by John Truckenbrodt 2019\n\"\"\"\n\nfrom numpy import array, argmin\nimport numpy as np\n\nimport logging\nlog = logging.getLogger(__name__)\n\n\ndef triangle_area(p1, p2, p3):\n    \"\"\"\n    calculates the area of a triangle given its vertices\n    \"\"\"\n    return abs(p1[0] * (p2[1] - p3[1]) + p2[0] * (p3[1] - p1[1]) + p3[0] * (p1[1] - p2[1])) / 2.\n\n\ndef triangle_areas_from_array(arr):\n    \"\"\"\n    take an (N,2) array of points and return an (N,1)\n    array of the areas of those triangles, where the first\n    and last areas are np.inf\n    see triangle_area for algorithm\n    \"\"\"\n\n    result = np.empty((len(arr),), arr.dtype)\n    result[0] = np.inf\n    result[-1] = np.inf\n\n    p1 = arr[:-2]\n    p2 = arr[1:-1]\n    p3 = arr[2:]\n\n    # an accumulators to avoid unnecessary intermediate arrays\n    accr = result[1:-1]  # Accumulate directly into result\n    acc1 = np.empty_like(accr)\n\n    np.subtract(p2[:, 1], p3[:, 1], out=accr)\n    np.multiply(p1[:, 0], accr, out=accr)\n    np.subtract(p3[:, 1], p1[:, 1], out=acc1)\n    np.multiply(p2[:, 0], acc1, out=acc1)\n    np.add(acc1, accr, out=accr)\n    np.subtract(p1[:, 1], p2[:, 1], out=acc1)\n    np.multiply(p3[:, 0], acc1, out=acc1)\n    np.add(acc1, accr, out=accr)\n    np.abs(accr, out=accr)\n    accr /= 2.\n    # Notice: accr was writing into result, so the answer is in there\n    return result\n\n\n# the final value in thresholds is np.inf, which will never be\n# the min value.  
So, I am safe in \"deleting\" an index by\n# just shifting the array over on top of it\ndef remove(s, i):\n    \"\"\"\n    Quick trick to remove an item from a numpy array without\n    creating a new object.  Rather than the array shape changing,\n    the final value just gets repeated to fill the space.\n    ~3.5x faster than numpy.delete\n    \"\"\"\n    s[i:-1] = s[i + 1:]\n\n\nclass VWSimplifier(object):\n    def __init__(self, pts):\n        \"\"\"\n        Initialize with points. takes some time to build\n        the thresholds but then all threshold filtering later\n        is ultra fast\n        \"\"\"\n        self.pts = np.array(pts)\n        self.thresholds = self.build_thresholds()\n        self.ordered_thresholds = sorted(self.thresholds, reverse=True)\n\n    def build_thresholds(self):\n        \"\"\"\n        compute the area value of each vertex, which one would\n        use to mask an array of points for any threshold value.\n        returns a numpy.array (length of pts)  of the areas.\n        \"\"\"\n        pts = self.pts\n        nmax = len(pts)\n        real_areas = triangle_areas_from_array(pts)\n        real_indices = list(range(nmax))\n\n        # destructable copies\n        # ARG! areas=real_areas[:] doesn't make a copy!\n        areas = np.copy(real_areas)\n        i = real_indices[:]\n\n        # pick first point and set up for loop\n        min_vert = int(argmin(areas))\n        this_area = areas[min_vert]\n        #  areas and i are modified for each point finished\n        remove(areas, min_vert)  # faster\n        # areas = np.delete(areas,min_vert) #slower\n        real_idx = i.pop(min_vert)\n\n        # cntr = 3\n        while this_area < np.inf:\n            '''min_vert was removed from areas and i.  
Now,\n            adjust the adjacent areas and remove the new\n            min_vert.\n            Now that min_vert was filtered out, min_vert points\n            to the point after the deleted point.'''\n\n            skip = False  # modified area may be the next minvert\n\n            try:\n                right_area = triangle_area(pts[i[min_vert - 1]],\n                                           pts[i[min_vert]], pts[i[min_vert + 1]])\n            except IndexError:\n                # trying to update area of endpoint. Don't do it\n                pass\n            else:\n                right_idx = i[min_vert]\n                if right_area <= this_area:\n                    # even if the point now has a smaller area,\n                    # it ultimately is not more significant than\n                    # the last point, which needs to be removed\n                    # first to justify removing this point.\n                    # Though this point is the next most significant\n                    right_area = this_area\n\n                    # min_vert refers to the point to the right of\n                    # the previous min_vert, so we can leave it\n                    # unchanged if it is still the min_vert\n                    skip = min_vert\n\n                # update both collections of areas\n                real_areas[right_idx] = right_area\n                areas[min_vert] = right_area\n\n            if min_vert > 1:\n                # cant try/except because 0-1=-1 is a valid index\n                left_area = triangle_area(pts[i[min_vert - 2]],\n                                          pts[i[min_vert - 1]], pts[i[min_vert]])\n                if left_area <= this_area:\n                    # same justification as above\n                    left_area = this_area\n                    skip = min_vert - 1\n                real_areas[i[min_vert - 1]] = left_area\n                areas[min_vert - 1] = left_area\n\n            # only argmin if we have 
too.\n            min_vert = skip or argmin(areas)\n            real_idx = i.pop(min_vert)\n            this_area = areas[min_vert]\n            # areas = np.delete(areas,min_vert) #slower\n            remove(areas, min_vert)  # faster\n            '''if sum(np.where(areas==np.inf)[0]) != sum(list(reversed(range(len(areas))))[:cntr]):\n              print \"broke:\",np.where(areas==np.inf)[0],cntr\n              break\n            cntr+=1\n            #if real_areas[0]<np.inf or real_areas[-1]<np.inf:\n            #  print \"NO!\", real_areas[0], real_areas[-1]\n            '''\n        return real_areas\n\n    def from_threshold(self, threshold):\n        return self.pts[self.thresholds >= threshold]\n\n    def from_number(self, n):\n        thresholds = self.ordered_thresholds\n        try:\n            threshold = thresholds[int(n)]\n        except IndexError:\n            return self.pts\n        return self.pts[self.thresholds > threshold]\n\n    def from_ratio(self, r):\n        if r <= 0 or r > 1:\n            raise ValueError(\"Ratio must be 0<r<=1\")\n        else:\n            return self.from_number(r * len(self.thresholds))\n\n\nclass WKTSimplifier(VWSimplifier):\n    \"\"\"\n    VWSimplifier that returns strings suitable for WKT creation\n    \"\"\"\n\n    def __init__(self, *args, **kwargs):\n        if 'precision' in kwargs:\n            p = kwargs.pop('precision')\n        else:\n            p = None\n        VWSimplifier.__init__(self, *args, **kwargs)\n        self.set_precision(p)\n\n    def set_precision(self, precision):\n        if precision:\n            self.pts_as_strs = self.pts.astype('S%s' % precision)\n        else:\n            self.pts_as_strs = self.pts.astype(str)\n\n    '''slow\n    def from_threshold(self,threshold,precision=None):\n        arr = np.array2string(self.pts[self.thresholds > threshold],precision=precision)\n        return arr.replace('[[ ','(').replace(']]',')').replace(']\\n [ ',',')\n    '''\n\n    def 
wkt_from_threshold(self, threshold, precision=None):\n        if precision:\n            self.set_precision(precision)\n        pts = self.pts_as_strs[self.thresholds >= threshold]\n        return '(%s)' % ','.join(['%s %s' % (x, y) for x, y in pts])\n\n    def wkt_from_number(self, n, precision=None):\n        thresholds = self.ordered_thresholds\n        if n < 3: n = 3  # For polygons. TODO something better\n        try:\n            threshold = thresholds[int(n)]\n        except IndexError:\n            threshold = 0\n\n        return self.wkt_from_threshold(threshold, precision=precision)\n\n    def wkt_from_ratio(self, r, precision=None):\n        if r <= 0 or r > 1:\n            raise ValueError(\"Ratio must be 0<r<=1\")\n        else:\n            return self.wkt_from_number(r * len(self.thresholds))\n\n\ntry:\n    from django.contrib.gis.gdal import OGRGeometry, OGRException\n    from django.contrib.gis.geos import GEOSGeometry, fromstr\nexcept ImportError:\n    class GDALSimplifier(object):\n        \"\"\"\n        Dummy object that would be replaced by a real one if correct module exists\n        \"\"\"\n\n        def __init__(*args, **kwargs):\n            log.info(\"\"\"\n                  django.contrib.gis.gdal not found.\n                  GDALSimplifier not available.\n                  \"\"\")\nelse:\n    from json import loads\n    import re\n\n    p = re.compile('([ 0123456789.]+) ([0123456789.]+)')\n\n\n    class GDALSimplifier(object):\n        \"\"\"\n        Warning, there is a slight loss of precision just in the\n        conversion from geometry object to numpy.array even if no\n        threshold is applied.  
ie:\n\n        originalpolygeom.area   ->   413962.65495176613\n        gdalsimplifierpoly.area ->   413962.65495339036\n        \"\"\"\n\n        def __init__(self, geom, precision=None, return_GDAL=True):\n            \"\"\"\n            accepts a gdal.OGRGeometry or geos.GEOSGeometry\n            object and wraps multiple\n            VWSimplifiers.  set return_GDAL to False for faster\n            filtering with arrays of floats returned instead of\n            geometry objects.\n            \"\"\"\n            global p\n            self.return_GDAL = return_GDAL\n            if isinstance(geom, OGRGeometry):\n                name = geom.geom_name\n                self.Geometry = lambda w: OGRGeometry(w, srs=geom.srs)\n                self.pts = np.array(geom.tuple)\n            elif isinstance(geom, GEOSGeometry):\n                name = geom.geom_type.upper()\n                self.Geometry = lambda w: fromstr(w)\n                self.pts = np.array(geom.tuple)\n            elif isinstance(geom, str) or isinstance(geom, bytes):\n                # assume wkt\n                # for WKT\n                def str2tuple(q):\n                    return '(%s,%s)' % (q.group(1), q.group(2))\n\n                self.return_GDAL = False  # don't even try\n                self.Geometry = lambda w: w  # this will never be used\n                name, pts = geom.split(' ', 1)\n                self.pts = loads(p.sub(str2tuple, pts). 
\\\n                                 replace('(', '[').replace(')', ']'))\n            self.precision = precision\n            if name == 'LINESTRING':\n                self.maskfunc = self.linemask\n                self.buildfunc = self.linebuild\n                self.fromnumfunc = self.notimplemented\n            elif name == \"POLYGON\":\n                self.maskfunc = self.polymask\n                self.buildfunc = self.polybuild\n                self.fromnumfunc = self.notimplemented\n            elif name == \"MULTIPOLYGON\":\n                self.maskfunc = self.multimask\n                self.buildfunc = self.multibuild\n                self.fromnumfunc = self.notimplemented\n            else:\n                raise RuntimeError(\"\"\"\n             Only types LINESTRING, POLYGON and MULTIPOLYGON\n             supported, but got %s\"\"\" % name)\n            # sets self.simplifiers to a list of VWSimplifiers\n            self.buildfunc()\n\n        # rather than concise, I'd rather be explicit and clear.\n\n        def pt2str(self, pt):\n            \"\"\"make length 2 numpy.array.__str__() fit for wkt\"\"\"\n            return ' '.join(pt)\n\n        def linebuild(self):\n            self.simplifiers = [WKTSimplifier(self.pts)]\n\n        def line2wkt(self, pts):\n            return u'LINESTRING %s' % pts\n\n        def linemask(self, threshold):\n            get_pts = self.get_pts\n            pts = get_pts(self.simplifiers[0], threshold)\n            if self.return_GDAL:\n                return self.Geometry(self.line2wkt(pts))\n            else:\n                return pts\n\n        def polybuild(self):\n            list_of_pts = self.pts\n            result = []\n            for pts in list_of_pts:\n                result.append(WKTSimplifier(pts))\n            self.simplifiers = result\n\n        def poly2wkt(self, list_of_pts):\n            return u'POLYGON (%s)' % ','.join(list_of_pts)\n\n        def polymask(self, threshold):\n            get_pts 
= self.get_pts\n            sims = self.simplifiers\n            list_of_pts = [get_pts(sim, threshold) for sim in sims]\n            if self.return_GDAL:\n                return self.Geometry(self.poly2wkt(list_of_pts))\n            else:\n                return array(list_of_pts)\n\n        def multibuild(self):\n            list_of_list_of_pts = self.pts\n            result = []\n            for list_of_pts in list_of_list_of_pts:\n                subresult = []\n                for pts in list_of_pts:\n                    subresult.append(WKTSimplifier(pts))\n                result.append(subresult)\n            self.simplifiers = result\n\n        def multi2wkt(self, list_of_list_of_pts):\n            outerlist = []\n            for list_of_pts in list_of_list_of_pts:\n                outerlist.append('(%s)' % ','.join(list_of_pts))\n            return u'MULTIPOLYGON (%s)' % ','.join(outerlist)\n\n        def multimask(self, threshold):\n            loflofsims = self.simplifiers\n            result = []\n            get_pts = self.get_pts\n            if self.return_GDAL:\n                ret_func = lambda r: self.Geometry(self.multi2wkt(r))\n            else:\n                ret_func = lambda r: r\n            for list_of_simplifiers in loflofsims:\n                subresult = []\n                for simplifier in list_of_simplifiers:\n                    subresult.append(get_pts(simplifier, threshold))\n                result.append(subresult)\n            return ret_func(result)\n\n        def notimplemented(self, n):\n            log.info('This function is not yet implemented')\n\n        def from_threshold(self, threshold):\n            precision = self.precision\n            if self.return_GDAL:\n                self.get_pts = lambda obj, t: obj.wkt_from_threshold(t, precision)\n            else:\n                self.get_pts = lambda obj, t: obj.from_threshold(t)\n            return self.maskfunc(threshold)\n\n        def from_number(self, n):\n        
    precision = self.precision\n            if self.return_GDAL:\n                self.get_pts = lambda obj, t: obj.wkt_from_number(t, precision)\n            else:\n                self.get_pts = lambda obj, t: obj.from_number(t)\n            return self.maskfunc(n)\n\n        def from_ratio(self, r):\n            precision = self.precision\n            if self.return_GDAL:\n                self.get_pts = lambda obj, t: obj.wkt_from_ratio(t, precision)\n            else:\n                self.get_pts = lambda obj, t: obj.from_ratio(t)\n            return self.maskfunc(r)\n\n\ndef fancy_parametric(k):\n    \"\"\"\n    good k's: .33,.5,.65,.7,1.3,1.4,1.9,3,4,5\n    \"\"\"\n    cos = np.cos\n    sin = np.sin\n    xt = lambda t: (k - 1) * cos(t) + cos(t * (k - 1))\n    yt = lambda t: (k - 1) * sin(t) - sin(t * (k - 1))\n    return xt, yt\n\n\nif __name__ == \"__main__\":\n    from time import time\n\n    n = 5000\n    thetas = np.linspace(0, 16 * np.pi, n)\n    xt, yt = fancy_parametric(1.4)\n    pts = np.array([[xt(t), yt(t)] for t in thetas])\n    start = time()\n    simplifier = VWSimplifier(pts)\n    pts = simplifier.from_number(1000)\n    end = time()\n    log.info(\"%s vertices removed in %02f seconds\" % (n - len(pts), end - start))\n\n    import matplotlib\n\n    matplotlib.use('AGG')\n    import matplotlib.pyplot as plot\n\n    plot.plot(pts[:, 0], pts[:, 1], color='r')\n    plot.savefig('visvalingam.png')\n    log.info(\"saved visvalingam.png\")\n    # plot.show()\n"
  },
  {
    "path": "pyroSAR/__init__.py",
    "content": "from .drivers import *\nfrom .archive import Archive, drop_archive\nfrom . import ancillary, drivers\n\nfrom importlib.metadata import version, PackageNotFoundError\n\ntry:\n    __version__ = version(__name__)\nexcept PackageNotFoundError:\n    # package is not installed\n    pass\n"
  },
  {
    "path": "pyroSAR/ancillary.py",
    "content": "###############################################################################\n# ancillary routines for software pyroSAR\n\n# Copyright (c) 2014-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\"\"\"\nThis module gathers central functions and classes for general pyroSAR applications.\n\"\"\"\nimport os\nimport re\nimport time\nimport uuid\nfrom pathlib import Path\nfrom math import sin, radians\nimport inspect\nfrom datetime import datetime\nfrom . import patterns\nfrom spatialist.ancillary import finder\nfrom dataclasses import dataclass\nfrom typing import Optional, Literal, Callable, Any\ntry:\n    from typing import Self\nexcept ImportError:\n    # Python < 3.11\n    from typing_extensions import Self\nfrom types import TracebackType\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef groupby(\n        images: list[str],\n        attribute: str\n) -> list[list[str]]:\n    \"\"\"\n    group a list of images by a metadata attribute\n    \n    Parameters\n    ----------\n    images:\n        the names of the images to be sorted\n    attribute:\n        the name of the attribute used for sorting;\n        see :func:`parse_datasetname` for options\n    \n    Returns\n    -------\n        a list of sub-lists containing the grouped images\n    \"\"\"\n    images_sort = sorted(images, key=lambda x: re.search(patterns.pyrosar, x).group(attribute))\n    out_meta = [[parse_datasetname(images_sort.pop(0))]]\n    while len(images_sort) > 0:\n        filename = 
images_sort.pop(0)\n        meta = parse_datasetname(filename)\n        \n        if out_meta[-1][0][attribute] == meta[attribute]:\n            out_meta[-1].append(meta)\n        else:\n            out_meta.append([meta])\n    out = [[x['filename'] for x in y] for y in out_meta]\n    return out\n\n\ndef groupbyTime(\n        images: list[str],\n        function: Callable[[str], Any],\n        time: int | float\n) -> list[list[str]]:\n    \"\"\"\n    function to group images by their acquisition time difference\n\n    Parameters\n    ----------\n    images:\n        a list of image names\n    function:\n        a function to derive the time from the image names; see e.g. :func:`seconds`\n    time:\n        a time difference in seconds by which to group the images\n\n    Returns\n    -------\n        a list of sub-lists containing the grouped images\n    \"\"\"\n    # sort images by time stamp\n    srcfiles = sorted(images, key=function)\n    \n    groups = [[srcfiles[0]]]\n    group = groups[0]\n    \n    for i in range(1, len(srcfiles)):\n        item = srcfiles[i]\n        timediff = abs(function(item) - function(group[-1]))\n        if timediff <= time:\n            group.append(item)\n        else:\n            groups.append([item])\n            group = groups[-1]\n    return [x[0] if len(x) == 1 else x for x in groups]\n\n\ndef multilook_factors(\n        source_rg: int | float,\n        source_az: int | float,\n        target: int | float,\n        geometry: Literal[\"SLANT_RANGE\", \"GROUND_RANGE\"],\n        incidence: int | float\n) -> tuple[int, int]:\n    \"\"\"\n    Compute multi-looking factors. A square pixel is approximated with\n    defined target ground range pixel spacing. 
The function computes a\n    cost for multilook factor combinations based on the difference between\n    the resulting spacing and the target spacing for range and azimuth\n    respectively and the difference between range and azimuth spacing.\n    Based on this cost, the optimal multilook factors are chosen.\n    Each of the three criteria is weighted equally.\n    \n    Parameters\n    ----------\n    source_rg:\n        the range pixel spacing\n    source_az:\n        the azimuth pixel spacing\n    target:\n        the target pixel spacing of an approximately square pixel\n    geometry:\n        the imaging geometry; either 'SLANT_RANGE' or 'GROUND_RANGE'\n    incidence:\n        the angle of incidence in degrees\n\n    Returns\n    -------\n        the multi-looking factors as (range looks, azimuth looks)\n    \n    Examples\n    --------\n    >>> from pyroSAR.ancillary import multilook_factors\n    >>> rlks, azlks = multilook_factors(source_rg=2, source_az=13, target=10,\n    >>>                                 geometry='SLANT_RANGE', incidence=39)\n    >>> print(rlks, azlks)\n    4 1\n    \"\"\"\n    \n    @dataclass\n    class MultilookResult:\n        rglks: int\n        azlks: int\n        cost: float\n    \n    sp_az = source_az\n    if geometry == 'SLANT_RANGE':\n        sp_rg = source_rg / sin(radians(incidence))\n    elif geometry == 'GROUND_RANGE':\n        sp_rg = source_rg\n    else:\n        raise ValueError(\"parameter 'geometry' must be either \"\n                         \"'SLANT_RANGE' or 'GROUND_RANGE'\")\n    sp_target = max(sp_az, sp_rg, target)\n    \n    # determine initial ML factors\n    rglks_init = int(round(sp_target / sp_rg))\n    azlks_init = int(round(sp_target / sp_az))\n    \n    best: Optional[MultilookResult] = None\n    \n    # weights for the distance criteria\n    w_rg = 1.\n    w_az = 1.\n    w_sq = 1.\n    \n    # iterate over some range of ML factors to find the best\n    # combination.\n    for rglks in range(1, 
rglks_init + 6):\n        sp_rg_out = sp_rg * rglks\n        \n        for azlks in range(1, azlks_init + 6):\n            sp_az_out = sp_az * azlks\n            \n            # compute distances and cost\n            d_rg = abs(sp_rg_out - sp_target)\n            d_az = abs(sp_az_out - sp_target)\n            d_sq = abs(sp_rg_out - sp_az_out)\n            \n            cost = w_rg * d_rg + w_az * d_az + w_sq * d_sq\n            \n            candidate = MultilookResult(\n                rglks=rglks,\n                azlks=azlks,\n                cost=cost,\n            )\n            if best is None:\n                best = candidate\n            else:\n                # primary: minimize cost\n                if candidate.cost < best.cost:\n                    best = candidate\n                # secondary: minimize rglks+azlks\n                elif candidate.cost == best.cost:\n                    if (candidate.rglks + candidate.azlks) < (best.rglks + best.azlks):\n                        best = candidate\n    rglks = best.rglks\n    azlks = best.azlks\n    \n    log.debug(f'ground range spacing: ({sp_rg * rglks}, {sp_az * azlks})')\n    return rglks, azlks\n\n\ndef seconds(filename: str) -> float:\n    \"\"\"\n    function to extract time in seconds from a file name.\n    the format must follow a fixed pattern: YYYYmmddTHHMMSS\n    Images processed with pyroSAR functionalities via module snap or gamma will contain this information.\n\n    Parameters\n    ----------\n    filename:\n        the name of a file from which to extract the time from\n\n    Returns\n    -------\n        the difference between the time stamp in filename and Jan 01 1900 in seconds\n    \"\"\"\n    # return mktime(strptime(re.findall('[0-9T]{15}', filename)[0], '%Y%m%dT%H%M%S'))\n    td = datetime.strptime(re.findall('[0-9T]{15}', filename)[0], '%Y%m%dT%H%M%S') - datetime(1900, 1, 1)\n    return td.total_seconds()\n\n\ndef parse_datasetname(\n        name: str,\n        parse_date: bool = 
False\n) -> dict[str, str | None | list[str] | datetime] | None:\n    \"\"\"\n    Parse the name of a pyroSAR processing product\n    \n    Parameters\n    ----------\n    name:\n        the name of the file to be parsed\n    parse_date:\n        parse the start date to a :class:`~datetime.datetime`\n        object or just return the string?\n    \n    Returns\n    -------\n        the metadata attributes parsed from the file name or\n        None if the file name does not match the pattern.\n    \n    Examples\n    --------\n    >>> meta = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif')\n    >>> print(sorted(meta.keys()))\n    ['acquisition_mode', 'extensions', 'filename', 'orbit',\n    'outname_base', 'polarization', 'proc_steps', 'sensor', 'start']\n    \"\"\"\n    \n    filename = os.path.abspath(name) if os.path.isfile(name) else name\n    \n    match = re.match(re.compile(patterns.pyrosar), filename)\n    if not match:\n        return\n    out = match.groupdict()\n    if out['extensions'] == '':\n        out['extensions'] = None\n    if out['proc_steps'] is not None:\n        out['proc_steps'] = out['proc_steps'].split('_')\n    if parse_date:\n        out['start'] = datetime.strptime(out['start'], '%Y%m%dT%H%M%S')\n    out['filename'] = filename\n    out['outname_base'] = out['outname_base'].strip('_')\n    return out\n\n\ndef find_datasets(\n        directory: str,\n        recursive: bool = False,\n        **kwargs\n) -> list[str]:\n    \"\"\"\n    find pyroSAR datasets in a directory based on their metadata\n    \n    Parameters\n    ----------\n    directory:\n        the name of the directory to be searched\n    recursive:\n        search the directory recursively into subdirectories?\n    kwargs:\n        Metadata attributes for filtering the scene list supplied as `key=value`. e.g. `sensor='S1A'`.\n        Multiple allowed options can be provided in tuples, e.g. 
`sensor=('S1A', 'S1B')`.\n        Any types other than tuples require an exact match, e.g. `proc_steps=['grd', 'mli', 'geo', 'norm', 'db']`\n        will be matched only if these processing steps are contained in the product name in this exact order.\n        The special attributes `start` and `stop` can be used for time filtering where `start<=value<=stop`.\n        See function :func:`parse_datasetname` for further options.\n    \n    Returns\n    -------\n        the file names found in the directory and filtered by metadata attributes\n    \n    Examples\n    --------\n    >>> selection = find_datasets('path/to/files', sensor=('S1A', 'S1B'), polarization='VV')\n    \"\"\"\n    files = finder(directory, [patterns.pyrosar], regex=True, recursive=recursive)\n    selection = []\n    for file in files:\n        meta = parse_datasetname(file)\n        matches = []\n        for key, val in kwargs.items():\n            if key == 'start':\n                match = val <= meta['start']\n            elif key == 'stop':\n                match = val >= meta['start']  # only the start time stamp is contained in the filename\n            elif isinstance(val, tuple):\n                match = meta[key] in val\n            else:\n                match = meta[key] == val\n            matches.append(match)\n        if all(matches):\n            selection.append(file)\n    return selection\n\n\ndef getargs(func: Callable[..., Any]) -> list[str]:\n    \"\"\"\n    get the arguments of a function\n    \n    Parameters\n    ----------\n    func:\n        the function to be checked\n\n    Returns\n    -------\n        the argument names\n    \"\"\"\n    return sorted(inspect.getfullargspec(func).args)\n\n\ndef hasarg(func: Callable[..., Any], arg: str) -> bool:\n    \"\"\"\n    simple check whether a function takes a parameter as input\n    \n    Parameters\n    ----------\n    func:\n        the function to be checked\n    arg:\n        the argument name to be found\n\n    Returns\n    
-------\n        does the function take this as argument?\n    \"\"\"\n    return arg in getargs(func)\n\n\ndef windows_fileprefix(\n        func: Callable[[str], object],\n        path: str,\n        exc_info: tuple[type[BaseException], BaseException, TracebackType | None],\n) -> None:\n    \"\"\"\n    Helper function for :func:`shutil.rmtree` to exceed Windows' file name length limit of 256 characters.\n    See `here <https://stackoverflow.com/questions/36219317/pathname-too-long-to-open>`_ for details.\n\n    Parameters\n    ----------\n    func:\n        the function to be executed, i.e. :func:`shutil.rmtree`\n    path:\n        the path to be deleted\n    exc_info:\n        execution info as returned by :func:`sys.exc_info`\n\n    Returns\n    -------\n\n    Examples\n    --------\n    >>> import shutil\n    >>> from pyroSAR.ancillary import windows_fileprefix\n    >>> shutil.rmtree('/path', onerror=windows_fileprefix)\n    \"\"\"\n    func(u'\\\\\\\\?\\\\' + path)\n\n\nclass Lock(object):\n    \"\"\"\n    File and folder locking mechanism.\n    This mechanism creates lock files indicating whether a file/folder\n    \n     1. is being modified (`target`.lock),\n     2. is being used/read (`target`.used_<uuid.uuid4>) or\n     3. was damaged during modification (`target`.error).\n    \n    Although these files will not prevent locking by other mechanisms (UNIX\n    locks are generally only advisory), this mechanism is respected across\n    any running instances. I.e., if such a lock file exists, no process\n    trying to acquire a lock using this class will succeed if a lock file\n    intending to prevent it exists. This was implemented because other existing\n    solutions like `filelock <https://github.com/tox-dev/filelock>`_ or\n    `fcntl <https://docs.python.org/3/library/fcntl.html>`_ do not implement\n    effective solutions for parallel jobs in HPC systems.\n    \n    Hard locks prevent any usage of the data. 
Damage/error locks work like hard\n    locks except that `timeout` is ignored and a `RuntimeError` is raised immediately.\n    Error locks are created if an error occurs whilst a hard lock is acquired and\n    `target` exists (by renaming the hard lock file).\n    Infinite usage locks may exist, each with a different random UUID. No hard\n    lock may be acquired whilst usage locks exist. On error usage locks are simply\n    deleted.\n    \n    The class supports nested locks. One function might lock a file, and another\n    function called in the same process will reuse this lock if it tries to lock\n    the file.\n    \n    It may happen that lock files remain when a process is killed by HPC schedulers\n    like Slurm because in this case the process is not ended by Python. Optimally,\n    hard locks should be renamed to error lock files and usage lock files should be\n    deleted. This has to be done separately.\n    \n    Examples\n    --------\n    >>> from pyroSAR.ancillary import Lock\n    >>> target = 'test.txt'\n    >>> with Lock(target=target):\n    >>>     with open(target, 'w') as f:\n    >>>         f.write('Hello World!')\n    \n    >>> with Lock(target=target):  # initialize lock\n    >>>     with Lock(target=target):  # reuse lock\n    >>>         with open(target, 'w') as f:\n    >>>             f.write('Hello World!')\n\n    Parameters\n    ----------\n    target:\n        the file/folder to lock\n    soft:\n        lock the file/folder only for reading (and not for modification)?\n    timeout:\n        the time in seconds to retry acquiring a lock\n    \"\"\"\n    _instances = {}\n    _nesting_levels = {}\n    \n    def __new__(\n            cls,\n            target: str,\n            soft: bool = False,\n            timeout: int = 7200\n    ) -> Self:\n        target_abs = os.path.abspath(os.path.expanduser(target))\n        if target_abs not in cls._instances:\n            log.debug(f'creating lock instance for target {target_abs}')\n           
 instance = super().__new__(cls)\n            cls._instances[target_abs] = instance\n            cls._nesting_levels[target_abs] = 0\n        else:\n            if soft != cls._instances[target_abs].soft:\n                msg = 'cannot place nested {}-lock on existing {}-lock for target {}'\n                vals = ['read', 'write'] if soft else ['write', 'read']\n                vals.append(target_abs)\n                raise RuntimeError(msg.format(*vals))\n            log.debug(f'reusing lock instance for target {target_abs}')\n        return cls._instances[target_abs]\n    \n    def __init__(\n            self,\n            target: str,\n            soft: bool = False,\n            timeout: int = 7200\n    ) -> None:\n        if not hasattr(self, '_initialized'):\n            self.target = os.path.abspath(os.path.expanduser(target))\n            used_id = str(uuid.uuid4())\n            self.lock = self.target + '.lock'\n            self.error = self.target + '.error'\n            self.used = self.target + f'.used_{used_id}'\n            self.soft = soft\n            if os.path.isfile(self.error):\n                msg = 'cannot acquire lock on damaged target: {}'\n                raise RuntimeError(msg.format(self.target))\n            end = time.time() + timeout\n            log.debug(f'trying to {\"read\" if self.soft else \"write\"}-lock {target}')\n            while True:\n                if time.time() > end:\n                    msg = 'could not acquire lock due to timeout: {}'\n                    raise RuntimeError(msg.format(self.target))\n                try:\n                    if self.soft and not os.path.isfile(self.lock):\n                        Path(self.used).touch(exist_ok=False)\n                        break\n                    if not self.soft and not self.is_used():\n                        Path(self.lock).touch(exist_ok=False)\n                        break\n                except FileExistsError:\n                    pass\n                
time.sleep(1)\n            log.debug(f'acquired {\"read\" if self.soft else \"write\"}-lock on {target}')\n            self._initialized = True\n        Lock._nesting_levels[self.target] += 1\n    \n    def __enter__(self) -> Self:\n        return self\n    \n    def __exit__(\n            self,\n            exc_type: type[BaseException] | None,\n            exc_value: BaseException | None,\n            traceback: TracebackType | None,\n    ) -> None:\n        self.remove(exc_type)\n    \n    def is_used(self) -> bool:\n        \"\"\"\n        Does any usage lock exist?\n        \"\"\"\n        base = os.path.basename(self.target)\n        folder = os.path.dirname(self.target)\n        files = list(Path(folder).glob(base + '.used*'))\n        return len(files) > 0\n    \n    def remove(\n            self,\n            exc_type: type[BaseException] | None = None\n    ) -> None:\n        \"\"\"\n        Remove the acquired soft/hard lock or rename it to an error lock.\n        \"\"\"\n        Lock._nesting_levels[self.target] -= 1\n        if Lock._nesting_levels[self.target] == 0:\n            if not self.soft and exc_type is not None and os.path.exists(self.target):\n                os.rename(self.lock, self.error)\n                log.debug(f'placed error-lock on {self.target}')\n            else:\n                if self.soft:\n                    os.remove(self.used)\n                else:\n                    os.remove(self.lock)\n                msg_sub = \"read\" if self.soft else \"write\"\n                log.debug(f'removed {msg_sub}-lock on {self.target}')\n            del Lock._instances[self.target]\n            del Lock._nesting_levels[self.target]\n        else:\n            log.debug(f'decrementing lock level on {self.target}')\n\n\nclass LockCollection(object):\n    \"\"\"\n    Like :class:`Lock` but for multiple files/folders.\n\n    Parameters\n    ----------\n    targets:\n        the files/folders to lock\n    soft:\n        lock the 
files/folders only for reading (and not for modification)?\n    timeout:\n        the time in seconds to retry acquiring a lock\n    \"\"\"\n    \n    def __init__(\n            self,\n            targets: list[str],\n            soft: bool = False,\n            timeout: int = 7200\n    ):\n        self.locks = [Lock(x, soft=soft, timeout=timeout) for x in targets]\n    \n    def __enter__(self) -> Self:\n        return self\n    \n    def __exit__(\n            self,\n            exc_type: type[BaseException] | None,\n            exc_value: BaseException | None,\n            traceback: TracebackType | None,\n    ) -> None:\n        for lock in reversed(self.locks):\n            lock.__exit__(exc_type, exc_value, traceback)\n"
  },
  {
    "path": "pyroSAR/archive.py",
    "content": "###############################################################################\n# Scene database tools for pyroSAR\n# Copyright (c) 2016-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nfrom __future__ import annotations\n\nimport os\nimport re\nimport gc\nimport shutil\nimport sys\nimport socket\nimport time\nimport platform\nimport logging\nimport csv\nimport inspect\nfrom datetime import datetime\n\nimport progressbar as pb\n\nfrom types import TracebackType\nfrom typing import Any, Protocol, runtime_checkable, Literal\n\nfrom osgeo import gdal\n\nfrom spatialist import sqlite3\nfrom spatialist.vector import Vector\nfrom spatialist.ancillary import finder\n\nfrom pyroSAR.drivers import identify, identify_many, ID\n\nfrom sqlalchemy import create_engine, Table, MetaData, Column, Integer, String, exc\nfrom sqlalchemy import inspect as sql_inspect\nfrom sqlalchemy.event import listen\nfrom sqlalchemy.orm import sessionmaker\nfrom sqlalchemy.sql import select, func\nfrom sqlalchemy.engine.url import URL\nfrom sqlalchemy.ext.automap import automap_base\nfrom sqlalchemy_utils import database_exists, create_database, drop_database\nfrom geoalchemy2 import Geometry\n\nlog = logging.getLogger(__name__)\n\ngdal.UseExceptions()\n\n\n@runtime_checkable\nclass SceneArchive(Protocol):\n    \"\"\"\n    Common interface for scene archive backends.\n\n    Implementations may represent local databases, STAC catalogs, remote APIs,\n    or other scene repositories, but should expose a 
consistent `select`\n    method and support context-manager usage.\n    \"\"\"\n    \n    def __enter__(self) -> SceneArchive:\n        \"\"\"\n        Enter the archive context.\n        \"\"\"\n        ...\n    \n    def __exit__(\n            self,\n            exc_type: type[BaseException] | None,\n            exc_val: BaseException | None,\n            exc_tb: TracebackType | None,\n    ) -> None:\n        \"\"\"\n        Exit the archive context and release resources if necessary.\n        \"\"\"\n        ...\n    \n    def close(self) -> None:\n        \"\"\"\n        Release open resources.\n\n        Implementations that do not hold resources may implement this as a no-op.\n        \"\"\"\n        ...\n    \n    @staticmethod\n    def select(\n            sensor: str | list[str] | None = None,\n            product: str | list[str] | None = None,\n            acquisition_mode: str | list[str] | None = None,\n            mindate: str | datetime | None = None,\n            maxdate: str | datetime | None = None,\n            vectorobject: Vector | None = None,\n            date_strict: bool = True,\n            return_value: str | list[str] = \"scene\"\n    ) -> list[Any]:\n        \"\"\"\n        Select scenes matching the query parameters.\n\n        Parameters\n        ----------\n        sensor:\n            One sensor or a list of sensors.\n        product:\n            One product type or a list of product types.\n        acquisition_mode:\n            One acquisition mode or a list of acquisition modes.\n        mindate:\n            Minimum acquisition date/time.\n        maxdate:\n            Maximum acquisition date/time.\n        vectorobject:\n            Spatial search geometry.\n        date_strict:\n            Whether date filtering should be strict.\n        return_value:\n            One return field or a list of return fields.\n        **kwargs:\n            Backend-specific optional query arguments.\n\n        Returns\n        -------\n     
       The query result. Implementations may return a list of scalar values or\n            tuples depending on `return_value`.\n        \"\"\"\n        ...\n\n\nclass Archive(SceneArchive):\n    \"\"\"\n    Utility for storing SAR image metadata in a database\n\n    Parameters\n    ----------\n    dbfile:\n        the filename for the SpatiaLite database. This might either point to an\n        existing database or will be created otherwise. If postgres is set to True,\n        this will be the name for the PostgreSQL database.\n    custom_fields:\n        a dictionary containing additional non-standard database column names and data types;\n        the names must be attributes of the SAR scenes to be inserted (i.e. id.attr) or keys in their meta attribute\n        (i.e. id.meta['attr'])\n    postgres:\n        enable postgres driver for the database. Default: False\n    user:\n        required for postgres driver: username to access the database. Default: 'postgres'\n    password:\n        required for postgres driver: password to access the database. Default: '1234'\n    host:\n        required for postgres driver: host where the database is hosted. Default: 'localhost'\n    port:\n        required for postgres driver: port number to the database. 
Default: 5432\n    cleanup:\n        check whether all registered scenes exist and remove missing entries?\n    legacy:\n        open an outdated database in legacy mode to import into a new database.\n        Opening an outdated database without legacy mode will throw a RuntimeError.\n\n    Examples\n    ----------\n    Ingest all Sentinel-1 scenes in a directory and its subdirectories into the database:\n\n    >>> from pyroSAR import Archive, identify\n    >>> from spatialist.ancillary import finder\n    >>> dbfile = '/.../scenelist.db'\n    >>> archive_s1 = '/.../sentinel1/GRD'\n    >>> scenes_s1 = finder(archive_s1, [r'^S1.*.zip'], regex=True, recursive=True)\n    >>> with Archive(dbfile) as archive:\n    >>>     archive.insert(scenes_s1)\n\n    select all Sentinel-1 A/B scenes stored in the database, which\n\n     * overlap with a test site\n     * were acquired in Ground-Range-Detected (GRD) Interferometric Wide Swath (IW) mode before 2018\n     * contain a VV polarization image\n     * have not been processed to directory `outdir` before\n\n    >>> from pyroSAR import Archive\n    >>> from spatialist import Vector\n    >>> archive = Archive('/.../scenelist.db')\n    >>> site = Vector('/path/to/site.shp')\n    >>> outdir = '/path/to/processed/results'\n    >>> maxdate = '20171231T235959'\n    >>> selection_proc = archive.select(vectorobject=site, processdir=outdir,\n    >>>                                 maxdate=maxdate, sensor=['S1A', 'S1B'],\n    >>>                                 product='GRD', acquisition_mode='IW', vv=1)\n    >>> archive.close()\n\n    Alternatively, the `with` statement can be used.\n    In this case to just check whether one particular scene is already registered in the database:\n\n    >>> from pyroSAR import identify, Archive\n    >>> scene = identify('S1A_IW_SLC__1SDV_20150330T170734_20150330T170801_005264_006A6C_DA69.zip')\n    >>> with Archive('/.../scenelist.db') as archive:\n    >>>     
print(archive.is_registered(scene.scene))\n\n    When setting postgres=True, a PostgreSQL database will be created at a given host.\n    Additional arguments are required.\n\n    >>> from pyroSAR import Archive, identify\n    >>> from spatialist.ancillary import finder\n    >>> dbfile = 'scenelist_db'\n    >>> archive_s1 = '/.../sentinel1/GRD'\n    >>> scenes_s1 = finder(archive_s1, [r'^S1.*.zip'], regex=True, recursive=True)\n    >>> with Archive(dbfile, postgres=True, user='user', password='password', host='host', port=5432) as archive:\n    >>>     archive.insert(scenes_s1)\n\n    Importing an old database:\n\n    >>> from pyroSAR import Archive\n    >>> db_new = 'scenes.db'\n    >>> db_old = 'scenes_old.db'\n    >>> with Archive(db_new) as db:\n    >>>     with Archive(db_old, legacy=True) as db_old:\n    >>>         db.import_outdated(db_old)\n    \"\"\"\n    \n    def __init__(\n            self,\n            dbfile: str,\n            custom_fields: dict[str, Any] | None = None,\n            postgres: bool = False,\n            user: str = 'postgres',\n            password: str = '1234',\n            host: str = 'localhost',\n            port: int = 5432,\n            cleanup: bool = True,\n            legacy: bool = False\n    ):\n        \n        if dbfile.endswith('.csv'):\n            raise RuntimeError(\"Please create a new Archive database and import the \"\n                               \"CSV file using db.import_outdated('<file>.csv').\")\n        # check for driver, if postgres then check if server is reachable\n        if not postgres:\n            self.driver = 'sqlite'\n            dirname = os.path.dirname(os.path.abspath(dbfile))\n            w_ok = os.access(dirname, os.W_OK)\n            if not w_ok:\n                raise RuntimeError('cannot write to directory {}'.format(dirname))\n            # catch if .db extension is missing\n            root, ext = os.path.splitext(dbfile)\n            if len(ext) == 0:\n                
dbfile = root + '.db'\n        else:\n            self.driver = 'postgresql'\n            if not self.__check_host(host, port):\n                sys.exit('Server not found!')\n        \n        connect_args = {}\n        \n        # create dict, with which a URL to the db is created\n        if self.driver == 'sqlite':\n            self.url_dict = {'drivername': self.driver,\n                             'database': dbfile,\n                             'query': {'charset': 'utf8'}}\n        if self.driver == 'postgresql':\n            self.url_dict = {'drivername': self.driver,\n                             'username': user,\n                             'password': password,\n                             'host': host,\n                             'port': port,\n                             'database': dbfile}\n            connect_args = {\n                'keepalives': 1,\n                'keepalives_idle': 30,\n                'keepalives_interval': 10,\n                'keepalives_count': 5}\n        \n        # create engine, containing URL and driver\n        log.debug('starting DB engine for {}'.format(URL.create(**self.url_dict)))\n        self.url = URL.create(**self.url_dict)\n        # https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS\n        self.engine = create_engine(url=self.url, echo=False,\n                                    connect_args=connect_args)\n        \n        # call to __load_spatialite() for sqlite, to load mod_spatialite via event handler listen()\n        if self.driver == 'sqlite':\n            log.debug('loading spatialite extension')\n            listen(target=self.engine, identifier='connect', fn=self.__load_spatialite)\n            # check if loading was successful\n            try:\n                with self.engine.begin() as conn:\n                    version = conn.execute('SELECT spatialite_version();')\n            except exc.OperationalError:\n                raise RuntimeError('could not 
load spatialite extension')\n        \n        # if database is new, (create postgres-db and) enable spatial extension\n        if not database_exists(self.engine.url):\n            if self.driver == 'postgresql':\n                log.debug('creating new PostgreSQL database')\n                create_database(self.engine.url)\n            log.debug('enabling spatial extension for new database')\n            with self.engine.begin() as conn:\n                if self.driver == 'sqlite':\n                    conn.execute(select([func.InitSpatialMetaData(1)]))\n                else:\n                    conn.exec_driver_sql('CREATE EXTENSION IF NOT EXISTS postgis;')\n        # create Session (ORM) and get metadata\n        self.Session = sessionmaker(bind=self.engine)\n        self.meta = MetaData(self.engine)\n        self.custom_fields = custom_fields\n        \n        # load or create tables\n        self.__init_data_table()\n        self.__init_duplicates_table()\n        \n        msg = (\"the 'data' table is missing {}. 
Please create a new database \"\n               \"and import the old one opened in legacy mode using \"\n               \"Archive.import_outdated.\")\n        pk = sql_inspect(self.data_schema).primary_key\n        if 'product' not in pk.columns.keys() and not legacy:\n            raise RuntimeError(msg.format(\"a primary key 'product'\"))\n        \n        if 'geometry' not in self.get_colnames() and not legacy:\n            raise RuntimeError(msg.format(\"the 'geometry' column\"))\n        \n        self.Base = automap_base(metadata=self.meta)\n        self.Base.prepare(self.engine, reflect=True)\n        self.Data = self.Base.classes.data\n        self.Duplicates = self.Base.classes.duplicates\n        self.dbfile = dbfile\n        \n        if cleanup:\n            log.info('checking for missing scenes')\n            self.cleanup()\n            sys.stdout.flush()\n    \n    def add_tables(\n            self,\n            tables: Table | list[Table],\n    ) -> None:\n        \"\"\"\n        Add tables to the database per :class:`sqlalchemy.schema.Table`\n        Tables provided here will be added to the database.\n\n        .. 
note::\n\n            Columns using Geometry must have setting management=True for SQLite,\n            for example: ``geometry = Column(Geometry('POLYGON', management=True, srid=4326))``\n\n        Parameters\n        ----------\n        tables:\n            The table(s) to be added to the database.\n        \"\"\"\n        created = []\n        if isinstance(tables, list):\n            for table in tables:\n                table.metadata = self.meta\n                if not sql_inspect(self.engine).has_table(str(table)):\n                    table.create(self.engine)\n                    created.append(str(table))\n        else:\n            table = tables\n            table.metadata = self.meta\n            if not sql_inspect(self.engine).has_table(str(table)):\n                table.create(self.engine)\n                created.append(str(table))\n        log.info('created table(s) {}.'.format(', '.join(created)))\n        self.Base = automap_base(metadata=self.meta)\n        self.Base.prepare(self.engine, reflect=True)\n    \n    def __init_data_table(self) -> None:\n        if sql_inspect(self.engine).has_table('data'):\n            self.data_schema = Table('data', self.meta, autoload_with=self.engine)\n            return\n        \n        log.debug(\"creating DB table 'data'\")\n        \n        self.data_schema = Table('data', self.meta,\n                                 Column('sensor', String),\n                                 Column('orbit', String),\n                                 Column('orbitNumber_abs', Integer),\n                                 Column('orbitNumber_rel', Integer),\n                                 Column('cycleNumber', Integer),\n                                 Column('frameNumber', Integer),\n                                 Column('acquisition_mode', String),\n                                 Column('start', String),\n                                 Column('stop', String),\n                                 Column('product', 
String, primary_key=True),\n                                 Column('samples', Integer),\n                                 Column('lines', Integer),\n                                 Column('outname_base', String, primary_key=True),\n                                 Column('scene', String),\n                                 Column('hh', Integer),\n                                 Column('vv', Integer),\n                                 Column('hv', Integer),\n                                 Column('vh', Integer),\n                                 Column('geometry', Geometry(geometry_type='POLYGON',\n                                                             management=True, srid=4326)))\n        # add custom fields\n        if self.custom_fields is not None:\n            for key, val in self.custom_fields.items():\n                if val in ['Integer', 'integer', 'int']:\n                    self.data_schema.append_column(Column(key, Integer))\n                elif val in ['String', 'string', 'str']:\n                    self.data_schema.append_column(Column(key, String))\n                else:\n                    log.info('Value in dict custom_fields must be \"integer\" or \"string\"!')\n        \n        self.data_schema.create(self.engine)\n    \n    def __init_duplicates_table(self) -> None:\n        # create tables if not existing\n        if sql_inspect(self.engine).has_table('duplicates'):\n            self.duplicates_schema = Table('duplicates', self.meta, autoload_with=self.engine)\n            return\n        \n        log.debug(\"creating DB table 'duplicates'\")\n        \n        self.duplicates_schema = Table('duplicates', self.meta,\n                                       Column('outname_base', String, primary_key=True),\n                                       Column('scene', String, primary_key=True))\n        self.duplicates_schema.create(self.engine)\n    \n    @staticmethod\n    def __load_spatialite(dbapi_conn: sqlite3.Connection, 
connection_record: Any) -> None:\n        \"\"\"\n        loads the spatialite extension for SQLite, not to be used outside the init()\n\n        Parameters\n        ----------\n        dbapi_conn:\n            db engine\n        connection_record:\n            not sure what it does, but it is needed by :func:`sqlalchemy.event.listen`\n        \"\"\"\n        dbapi_conn.enable_load_extension(True)\n        # check which platform and use according mod_spatialite\n        if platform.system() == 'Linux':\n            for option in ['mod_spatialite', 'mod_spatialite.so']:\n                try:\n                    dbapi_conn.load_extension(option)\n                except sqlite3.OperationalError:\n                    continue\n        elif platform.system() == 'Darwin':\n            for option in ['mod_spatialite.so', 'mod_spatialite.7.dylib',\n                           'mod_spatialite.dylib']:\n                try:\n                    dbapi_conn.load_extension(option)\n                except sqlite3.OperationalError:\n                    continue\n        else:\n            dbapi_conn.load_extension('mod_spatialite')\n    \n    def __prepare_insertion(self, scene: str | ID) -> Any:\n        \"\"\"\n        read scene metadata and parse a string for inserting it into the database\n\n        Parameters\n        ----------\n        scene:\n            a SAR scene\n\n        Returns\n        -------\n            object of class Data\n        \"\"\"\n        id = scene if isinstance(scene, ID) else identify(scene)\n        pols = [x.lower() for x in id.polarizations]\n        # insertion as an object of Class Data (reflected in the init())\n        insertion = self.Data()\n        colnames = self.get_colnames()\n        for attribute in colnames:\n            if attribute == 'geometry':\n                geom = id.geometry()\n                geom.reproject(4326)\n                geom = geom.convert2wkt(set3D=False)[0]\n                geom = 'SRID=4326;' + str(geom)\n    
            # set attributes of the Data object according to input\n                setattr(insertion, 'geometry', geom)\n            elif attribute in ['hh', 'vv', 'hv', 'vh']:\n                setattr(insertion, attribute, int(attribute in pols))\n            else:\n                if hasattr(id, attribute):\n                    attr = getattr(id, attribute)\n                elif attribute in id.meta.keys():\n                    attr = id.meta[attribute]\n                else:\n                    raise AttributeError('could not find attribute {}'.format(attribute))\n                value = attr() if inspect.ismethod(attr) else attr\n                setattr(insertion, str(attribute), value)\n        \n        return insertion  # return the Data object\n    \n    def __select_missing(self, table: str) -> list[str]:\n        \"\"\"\n\n        Parameters\n        ----------\n        table:\n            the name of the table\n\n        Returns\n        -------\n            the names of all scenes, which are no longer stored in their registered location\n        \"\"\"\n        with self.Session() as session:\n            if table == 'data':\n                # using ORM query to get all scenes locations\n                scenes = session.query(self.Data.scene)\n            elif table == 'duplicates':\n                scenes = session.query(self.Duplicates.scene)\n            else:\n                raise ValueError(\"parameter 'table' must either be 'data' or 'duplicates'\")\n        files = [self.to_str(x[0]) for x in scenes]\n        return [x for x in files if not os.path.isfile(x)]\n    \n    def insert(\n            self,\n            scene_in: str | ID | list[str | ID],\n            pbar: bool = False,\n            test: bool = False\n    ) -> None:\n        \"\"\"\n        Insert one or many scenes into the database\n\n        Parameters\n        ----------\n        scene_in:\n            a SAR scene or a list of scenes to be inserted\n        pbar:\n            
show a progress bar?\n        test:\n            should the insertion only be tested or directly be committed to the database?\n        \"\"\"\n        \n        if isinstance(scene_in, (ID, str)):\n            scene_in = [scene_in]\n        if not isinstance(scene_in, list):\n            raise RuntimeError('scene_in must either be a string pointing to a file, a pyroSAR.ID object '\n                               'or a list containing several of either')\n        \n        log.info('filtering scenes by name')\n        scenes = self.filter_scenelist(scene_in)\n        if len(scenes) == 0:\n            log.info('...nothing to be done')\n            return\n        log.info('identifying scenes and extracting metadata')\n        scenes = identify_many(scenes, pbar=pbar)\n        \n        if len(scenes) == 0:\n            log.info('all scenes are already registered')\n            return\n        \n        counter_regulars = 0\n        counter_duplicates = 0\n        list_duplicates = []\n        \n        message = 'inserting {0} scene{1} into database'\n        log.info(message.format(len(scenes), '' if len(scenes) == 1 else 's'))\n        log.debug('testing changes in temporary database')\n        if pbar:\n            progress = pb.ProgressBar(max_value=len(scenes))\n        else:\n            progress = None\n        insertions = []\n        with self.Session() as session:\n            for i, id in enumerate(scenes):\n                basename = id.outname_base()\n                if not self.is_registered(id):\n                    insertion = self.__prepare_insertion(id)\n                    insertions.append(insertion)\n                    counter_regulars += 1\n                    log.debug('regular:   {}'.format(id.scene))\n                elif not self.__is_registered_in_duplicates(id):\n                    insertion = self.Duplicates(outname_base=basename,\n                                                scene=id.scene)\n                    
insertions.append(insertion)\n                    counter_duplicates += 1\n                    log.debug('duplicate: {}'.format(id.scene))\n                else:\n                    list_duplicates.append(id.outname_base())\n                \n                if progress is not None:\n                    progress.update(i + 1)\n            \n            if progress is not None:\n                progress.finish()\n            \n            session.add_all(insertions)\n            \n            if not test:\n                log.debug('committing transactions to permanent database')\n                # commit changes of the session\n                session.commit()\n            else:\n                log.info('rolling back temporary database changes')\n                # roll back changes of the session\n                session.rollback()\n        \n        message = '{0} scene{1} registered regularly'\n        log.info(message.format(counter_regulars, '' if counter_regulars == 1 else 's'))\n        message = '{0} duplicate{1} registered'\n        log.info(message.format(counter_duplicates, '' if counter_duplicates == 1 else 's'))\n    \n    def is_registered(self, scene: str | ID) -> bool:\n        \"\"\"\n        Simple check if a scene is already registered in the database.\n\n        Parameters\n        ----------\n        scene:\n            the SAR scene\n\n        Returns\n        -------\n            is the scene already registered?\n        \"\"\"\n        id = scene if isinstance(scene, ID) else identify(scene)\n        with self.Session() as session:\n            # ORM query, where scene equals id.scene, return first\n            exists_data = session.query(self.Data.outname_base).filter_by(\n                outname_base=id.outname_base(), product=id.product).first()\n            exists_duplicates = session.query(self.Duplicates.outname_base).filter(\n                self.Duplicates.outname_base == id.outname_base()).first()\n        in_data = False\n        
in_dup = False\n        if exists_data:\n            in_data = len(exists_data) != 0\n        if exists_duplicates:\n            in_dup = len(exists_duplicates) != 0\n        return in_data or in_dup\n    \n    def __is_registered_in_duplicates(self, scene: str | ID) -> bool:\n        \"\"\"\n        Simple check if a scene is already registered in the database.\n\n        Parameters\n        ----------\n        scene:\n            the SAR scene\n\n        Returns\n        -------\n            is the scene already registered?\n        \"\"\"\n        id = scene if isinstance(scene, ID) else identify(scene)\n        with self.Session() as session:\n            # ORM query as in is registered\n            exists_duplicates = session.query(self.Duplicates.outname_base).filter(\n                self.Duplicates.outname_base == id.outname_base()).first()\n        in_dup = False\n        if exists_duplicates:\n            in_dup = len(exists_duplicates) != 0\n        return in_dup\n    \n    def cleanup(self) -> None:\n        \"\"\"\n        Remove all scenes from the database, which are no longer stored in their registered location\n        \"\"\"\n        missing = self.__select_missing('data')\n        for scene in missing:\n            log.info('Removing missing scene from database tables: {}'.format(scene))\n            self.drop_element(scene, with_duplicates=True)\n    \n    @staticmethod\n    def to_str(string: str | bytes, encoding: str = 'utf-8') -> str:\n        if isinstance(string, bytes):\n            return string.decode(encoding)\n        else:\n            return string\n    \n    def export2shp(self, path: str, table: str = 'data') -> None:\n        \"\"\"\n        export the database to a shapefile\n\n        Parameters\n        ----------\n        path:\n            the path of the shapefile to be written.\n            This will overwrite other files with the same name.\n            If a folder is given in path it is created if not existing.\n         
   If the file extension is missing '.shp' is added.\n        table:\n            the table to write to the shapefile; either 'data' (default) or 'duplicates'\n        \"\"\"\n        if table not in self.get_tablenames():\n            log.warning('Only data and duplicates can be exported!')\n            return\n        \n        # add the .shp extension if missing\n        if not path.endswith('.shp'):\n            path += '.shp'\n        \n        # creates folder if not present, adds .shp if not within the path\n        dirname = os.path.dirname(path)\n        os.makedirs(dirname, exist_ok=True)\n        \n        launder_names = {'acquisition_mode': 'acq_mode',\n                         'orbitNumber_abs': 'orbit_abs',\n                         'orbitNumber_rel': 'orbit_rel',\n                         'cycleNumber': 'cycleNr',\n                         'frameNumber': 'frameNr',\n                         'outname_base': 'outname'}\n        \n        sel_tables = ', '.join([f'\"{s}\" as {launder_names[s]}' if s in launder_names else s\n                                for s in self.get_colnames(table)])\n        \n        if self.driver == 'sqlite':\n            srcDS = self.dbfile\n        elif self.driver == 'postgresql':\n            srcDS = \"\"\"PG:host={host} port={port} user={username}\n            dbname={database} password={password} active_schema=public\"\"\".format(**self.url_dict)\n        else:\n            raise RuntimeError('unknown archive driver')\n        \n        gdal.VectorTranslate(destNameOrDestDS=path, srcDS=srcDS,\n                             format='ESRI Shapefile',\n                             SQLStatement=f'SELECT {sel_tables} FROM {table}',\n                             SQLDialect=self.driver)\n    \n    def filter_scenelist(self, scenelist: list[str | ID]) -> list[str | ID]:\n        \"\"\"\n        Filter a list of scenes by file names already registered in the database.\n\n        Parameters\n        ----------\n        
scenelist:\n            the scenes to be filtered\n\n        Returns\n        -------\n            The objects of `scenelist` for all scenes whose basename\n            is not yet registered in the database.\n\n        \"\"\"\n        for item in scenelist:\n            if not isinstance(item, (ID, str)):\n                raise TypeError(\"items in scenelist must be of type 'str' or 'pyroSAR.ID'\")\n        \n        with self.Session() as session:\n            # ORM query, get all scenes locations\n            scenes_data = session.query(self.Data.scene)\n            registered = [os.path.basename(self.to_str(x[0])) for x in scenes_data]\n            scenes_duplicates = session.query(self.Duplicates.scene)\n        duplicates = [os.path.basename(self.to_str(x[0])) for x in scenes_duplicates]\n        names = [item.scene if isinstance(item, ID) else item for item in scenelist]\n        filtered = [x for x, y in zip(scenelist, names)\n                    if os.path.basename(y) not in registered + duplicates]\n        return filtered\n    \n    def get_colnames(self, table: str = 'data') -> list[str]:\n        \"\"\"\n        Return the names of all columns of a table.\n\n        Returns\n        -------\n            the column names of the chosen table\n        \"\"\"\n        # get all columns of `table`, but shows geometry columns not correctly\n        table_info = Table(table, self.meta, autoload=True, autoload_with=self.engine)\n        col_names = table_info.c.keys()\n        \n        return sorted([self.to_str(x) for x in col_names])\n    \n    def get_tablenames(self, return_all: bool = False) -> list[str]:\n        \"\"\"\n        Return the names of all tables in the database\n\n        Parameters\n        ----------\n        return_all:\n            only gives tables data and duplicates on default.\n            Set to True to get all other tables and views created automatically.\n\n        Returns\n        -------\n            the table names\n        
\"\"\"\n        #  TODO: make this dynamic\n        #  the method was intended to only return user generated tables by default, as well as data and duplicates\n        all_tables = ['ElementaryGeometries', 'SpatialIndex', 'geometry_columns', 'geometry_columns_auth',\n                      'geometry_columns_field_infos', 'geometry_columns_statistics', 'geometry_columns_time',\n                      'spatial_ref_sys', 'spatial_ref_sys_aux', 'spatialite_history', 'sql_statements_log',\n                      'sqlite_sequence', 'views_geometry_columns', 'views_geometry_columns_auth',\n                      'views_geometry_columns_field_infos', 'views_geometry_columns_statistics',\n                      'virts_geometry_columns', 'virts_geometry_columns_auth', 'virts_geometry_columns_field_infos',\n                      'virts_geometry_columns_statistics', 'data_licenses', 'KNN']\n        # get tablenames from metadata\n        tables = sorted([self.to_str(x) for x in self.meta.tables.keys()])\n        if return_all:\n            return tables\n        else:\n            ret = []\n            for i in tables:\n                if i not in all_tables and 'idx_' not in i:\n                    ret.append(i)\n            return ret\n    \n    def get_unique_directories(self) -> list[str]:\n        \"\"\"\n        Get a list of directories containing registered scenes\n\n        Returns\n        -------\n            the directory names\n        \"\"\"\n        with self.Session() as session:\n            # ORM query, get all directories\n            scenes = session.query(self.Data.scene)\n        registered = [os.path.dirname(self.to_str(x[0])) for x in scenes]\n        return list(set(registered))\n    \n    def import_outdated(self, dbfile: str | Archive) -> None:\n        \"\"\"\n        import an older database\n\n        Parameters\n        ----------\n        dbfile:\n            the old database. 
If this is a string, the name of a CSV file is expected.\n        \"\"\"\n        if isinstance(dbfile, str) and dbfile.endswith('csv'):\n            with open(dbfile) as csvfile:\n                text = csvfile.read()\n                csvfile.seek(0)\n                dialect = csv.Sniffer().sniff(text)\n                reader = csv.DictReader(csvfile, dialect=dialect)\n                scenes = []\n                for row in reader:\n                    scenes.append(row['scene'])\n                self.insert(scenes)\n        elif isinstance(dbfile, Archive):\n            with self.engine.begin() as conn:\n                scenes = conn.exec_driver_sql('SELECT scene from data')\n                scenes = [s.scene for s in scenes]\n            self.insert(scenes)\n            reinsert = dbfile.select_duplicates(value='scene')\n            if reinsert is not None:\n                self.insert(reinsert)\n        else:\n            raise RuntimeError(\"'dbfile' must either be a CSV file name or an Archive object\")\n    \n    def move(self, scenelist: list[str], directory: str, pbar: bool = False) -> None:\n        \"\"\"\n        Move a list of files while keeping the database entries up to date.\n        If a scene is registered in the database (in either the data or duplicates table),\n        the scene entry is directly changed to the new location.\n\n        Parameters\n        ----------\n        scenelist:\n            the file locations\n        directory:\n            a folder to which the files are moved\n        pbar:\n            show a progress bar?\n        \"\"\"\n        if not os.path.isdir(directory):\n            os.mkdir(directory)\n        if not os.access(directory, os.W_OK):\n            raise RuntimeError('directory cannot be written to')\n        failed = []\n        double = []\n        if pbar:\n            progress = pb.ProgressBar(max_value=len(scenelist)).start()\n        else:\n            progress = None\n        \n        for i, scene in 
enumerate(scenelist):\n            new = os.path.join(directory, os.path.basename(scene))\n            if os.path.isfile(new):\n                double.append(new)\n                continue\n            try:\n                shutil.move(scene, directory)\n            except shutil.Error:\n                failed.append(scene)\n                continue\n            finally:\n                if progress is not None:\n                    progress.update(i + 1)\n            if self.select(scene=scene) != 0:\n                table = 'data'\n            else:\n                # using core connection to execute SQL syntax (as was before)\n                query = '''SELECT scene FROM duplicates WHERE scene='{0}' '''.format(scene)\n                with self.engine.begin() as conn:\n                    query_duplicates = conn.exec_driver_sql(query)\n                if len(query_duplicates) != 0:\n                    table = 'duplicates'\n                else:\n                    table = None\n            if table:\n                # using core connection to execute SQL syntax (as was before)\n                query = '''UPDATE {0} SET scene= '{1}' WHERE scene='{2}' '''.format(table, new, scene)\n                with self.engine.begin() as conn:\n                    conn.exec_driver_sql(query)\n        if progress is not None:\n            progress.finish()\n        \n        if len(failed) > 0:\n            log.info('The following scenes could not be moved:\\n{}'.format('\\n'.join(failed)))\n        if len(double) > 0:\n            log.info('The following scenes already exist at the target location:\\n{}'.format('\\n'.join(double)))\n    \n    def select(\n            self,\n            sensor: str | list[str] | None = None,\n            product: str | list[str] | None = None,\n            acquisition_mode: str | list[str] | None = None,\n            mindate: str | datetime | None = None,\n            maxdate: str | datetime | None = None,\n            vectorobject: Vector | 
None = None,\n            date_strict: bool = True,\n            processdir: str | None = None,\n            recursive: bool = False,\n            polarizations: list[str] | None = None,\n            return_value: str | list[str] = \"scene\",\n            **kwargs: Any\n    ) -> list[str | bytes] | list[tuple[str | bytes]]:\n        \"\"\"\n        select scenes from the database\n\n        Parameters\n        ----------\n        sensor:\n            the satellite sensor(s)\n        product:\n            the product type(s)\n        acquisition_mode:\n            the sensor's acquisition mode(s)\n        mindate:\n            the minimum acquisition date; strings must be in format YYYYmmddTHHMMSS; default: None\n        maxdate:\n            the maximum acquisition date; strings must be in format YYYYmmddTHHMMSS; default: None\n        vectorobject:\n            a geometry with which the scenes need to overlap. The object may only contain one feature.\n        date_strict:\n            treat dates as strict limits or also allow flexible limits to incorporate scenes\n            whose acquisition period overlaps with the defined limit?\n\n            - strict: start >= mindate & stop <= maxdate\n            - not strict: stop >= mindate & start <= maxdate\n        processdir:\n            A directory to be scanned for already processed scenes;\n            the selected scenes will be filtered to those that have not yet been processed. Default: None\n        recursive:\n            (only if `processdir` is not None) should also the subdirectories of the `processdir` be scanned?\n        polarizations:\n            a list of polarization strings, e.g. ['HH', 'VV']\n        return_value:\n            the query return value(s). 
Options:\n\n            - `geometry_wkb`: the scene's footprint geometry formatted as WKB\n            - `geometry_wkt`: the scene's footprint geometry formatted as WKT\n            - `mindate`: the acquisition start datetime in UTC formatted as YYYYmmddTHHMMSS\n            - `maxdate`: the acquisition end datetime in UTC formatted as YYYYmmddTHHMMSS\n            - all further database column names (see :meth:`~Archive.get_colnames()`)\n\n        **kwargs:\n            any further arguments (columns), which are registered in the database. See :meth:`~Archive.get_colnames()`\n\n        Returns\n        -------\n            If a single return_value is specified: list of values for that attribute.\n            If multiple return_values are specified: list of tuples containing the requested attributes.\n            The return value type is bytes for `geometry_wkb` and str for all others.\n        \"\"\"\n        # Convert return_value to list if it's a string\n        if isinstance(return_value, str):\n            return_values = [return_value]\n        else:\n            return_values = return_value\n        \n        return_values_sql = []\n        for val in return_values:\n            if val == 'mindate':\n                return_values_sql.append('start')\n            elif val == 'maxdate':\n                return_values_sql.append('stop')\n            elif val == 'geometry_wkt':\n                prefix = 'ST_' if self.driver == 'postgresql' else ''\n                return_values_sql.append(f'{prefix}AsText(geometry) as geometry_wkt')\n            elif val == 'geometry_wkb':\n                prefix = 'ST_' if self.driver == 'postgresql' else ''\n                return_values_sql.append(f'{prefix}AsBinary(geometry) as geometry_wkb')\n            else:\n                return_values_sql.append(val)\n        \n        # Validate that all requested return values exist in the database\n        valid_columns = self.get_colnames()\n        extra = ['mindate', 'maxdate', 
'geometry_wkt', 'geometry_wkb']\n        normal_returns = [x for x in return_values if x not in extra]\n        invalid_returns = [x for x in normal_returns if x not in valid_columns]\n        if invalid_returns:\n            invalid_str = ', '.join(invalid_returns)\n            msg = (f\"The following options are not supported as \"\n                   f\"return values: {invalid_str}\")\n            raise ValueError(msg)\n        \n        arg_valid = [x for x in kwargs.keys() if x in self.get_colnames()]\n        arg_invalid = [x for x in kwargs.keys() if x not in self.get_colnames()]\n        if len(arg_invalid) > 0:\n            log.info(f\"the following arguments will be ignored as they are not \"\n                     f\"registered in the data base: {', '.join(arg_invalid)}\")\n        \n        def convert_general(k: str, v: Any) -> str:\n            if isinstance(v, (float, int, str)):\n                return f\"\"\"{k}='{v}'\"\"\"\n            elif isinstance(v, (tuple, list)):\n                v_str = \"', '\".join(map(str, v))\n                return f\"\"\"{k} IN ('{v_str}')\"\"\"\n            else:\n                raise TypeError(f\"unsupported type for '{k}': {type(v)}\")\n        \n        arg_format = []\n        vals = []\n        for key in arg_valid:\n            if key == 'scene':\n                arg_format.append('''scene LIKE '%%{0}%%' '''.format(os.path.basename(kwargs[key])))\n            else:\n                arg_format.append(convert_general(key, kwargs[key]))\n        \n        if sensor:\n            arg_format.append(convert_general('sensor', sensor))\n        \n        if product:\n            arg_format.append(convert_general('product', product))\n        \n        if acquisition_mode:\n            arg_format.append(convert_general('acquisition_mode', acquisition_mode))\n        \n        if mindate:\n            if isinstance(mindate, datetime):\n                mindate = mindate.strftime('%Y%m%dT%H%M%S')\n            if 
re.search('[0-9]{8}T[0-9]{6}', mindate):\n                if date_strict:\n                    arg_format.append('start>=?')\n                else:\n                    arg_format.append('stop>=?')\n                vals.append(mindate)\n            else:\n                log.info('WARNING: argument mindate is ignored, must be in format YYYYmmddTHHMMSS')\n        \n        if maxdate:\n            if isinstance(maxdate, datetime):\n                maxdate = maxdate.strftime('%Y%m%dT%H%M%S')\n            if re.search('[0-9]{8}T[0-9]{6}', maxdate):\n                if date_strict:\n                    arg_format.append('stop<=?')\n                else:\n                    arg_format.append('start<=?')\n                vals.append(maxdate)\n            else:\n                log.info('WARNING: argument maxdate is ignored, must be in format YYYYmmddTHHMMSS')\n        \n        if polarizations:\n            for pol in polarizations:\n                if pol in ['HH', 'VV', 'HV', 'VH']:\n                    arg_format.append('{}=1'.format(pol.lower()))\n        \n        if vectorobject:\n            if isinstance(vectorobject, Vector):\n                if vectorobject.nfeatures > 1:\n                    raise RuntimeError(\"'vectorobject' contains more than one feature.\")\n                with vectorobject.clone() as vec:\n                    vec.reproject(4326)\n                    site_geom = vec.convert2wkt(set3D=False)[0]\n                # postgres has a different way to store geometries\n                if self.driver == 'postgresql':\n                    statement = f\"st_intersects(geometry, 'SRID=4326; {site_geom}')\"\n                    arg_format.append(statement)\n                else:\n                    arg_format.append('st_intersects(GeomFromText(?, 4326), geometry) = 1')\n                    vals.append(site_geom)\n            else:\n                log.info('WARNING: argument vectorobject is ignored, must be of type spatialist.vector.Vector')\n      
  \n        if len(arg_format) > 0:\n            subquery = ' WHERE {}'.format(' AND '.join(arg_format))\n        else:\n            subquery = ''\n        \n        # Modify the query to select the requested return values\n        query = 'SELECT {}, outname_base FROM data{}'.format(', '.join(return_values_sql), subquery)\n        \n        # the query gets assembled stepwise here\n        for val in vals:\n            query = query.replace('?', \"\"\"'{0}'\"\"\", 1).format(val)\n        log.debug(query)\n        \n        # core SQL execution\n        with self.engine.begin() as conn:\n            query_rs = conn.exec_driver_sql(query)\n            \n            if processdir and os.path.isdir(processdir):\n                scenes = [x for x in query_rs\n                          if len(finder(processdir, [x[-1]],\n                                        regex=True, recursive=recursive)) == 0]\n            else:\n                scenes = query_rs\n            \n            ret = []\n            for x in scenes:\n                # If only one return value was requested, append just that value\n                if len(return_values) == 1:\n                    ret.append(self.to_str(x[0]))\n                else:\n                    # If multiple return values were requested, append a tuple of all values\n                    values = []\n                    for k, v in zip(return_values, x[:-1]):  # Exclude outname_base\n                        if k == 'geometry_wkb':\n                            values.append(v)\n                        else:\n                            values.append(self.to_str(v))\n                    ret.append(tuple(values))\n        return ret\n    \n    def select_duplicates(\n            self,\n            outname_base: str | None = None,\n            scene: str | None = None,\n            value: Literal[\"id\", \"scene\"] = \"id\"\n    ) -> list[str]:\n        \"\"\"\n        Select scenes from the duplicates table. 
In case both `outname_base` and `scene` are set to None all scenes in\n        the table are returned, otherwise only those that match the attributes `outname_base` and `scene` if they are not None.\n\n        Parameters\n        ----------\n        outname_base:\n            the basename of the scene\n        scene:\n            the scene name\n        value:\n            the return value; either 'id' or 'scene'\n\n        Returns\n        -------\n            the selected scene(s)\n        \"\"\"\n        if value == 'id':\n            key = 0\n        elif value == 'scene':\n            key = 1\n        else:\n            raise ValueError(\"argument 'value' must be either 0 or 1\")\n        \n        with self.engine.begin() as conn:\n            if not outname_base and not scene:\n                # core SQL execution\n                scenes = conn.exec_driver_sql('SELECT * from duplicates')\n            else:\n                cond = []\n                arg = []\n                if outname_base:\n                    cond.append('outname_base=?')\n                    arg.append(outname_base)\n                if scene:\n                    cond.append('scene=?')\n                    arg.append(scene)\n                query = 'SELECT * from duplicates WHERE {}'.format(' AND '.join(cond))\n                for a in arg:\n                    query = query.replace('?', ''' '{0}' ''', 1).format(a)\n                # core SQL execution\n                scenes = conn.exec_driver_sql(query)\n            \n            ret = []\n            for x in scenes:\n                ret.append(self.to_str(x[key]))\n        \n        return ret\n    \n    @property\n    def size(self) -> tuple[int, int]:\n        \"\"\"\n        get the number of scenes registered in the database\n\n        Returns\n        -------\n            the number of scenes in (1) the main table and (2) the duplicates table\n        \"\"\"\n        # ORM query\n        with self.Session() as session:\n         
   r1 = session.query(self.Data.outname_base).count()\n            r2 = session.query(self.Duplicates.outname_base).count()\n        return r1, r2\n    \n    def __enter__(self) -> Archive:\n        return self\n    \n    def close(self) -> None:\n        \"\"\"\n        close the database connection\n        \"\"\"\n        self.engine.dispose()\n        gc.collect(generation=2)  # this was added as a fix for win PermissionError when deleting sqlite.db files.\n    \n    def __exit__(\n            self,\n            exc_type: type[BaseException] | None,\n            exc_val: BaseException | None,\n            exc_tb: TracebackType | None\n    ) -> None:\n        self.close()\n    \n    def drop_element(\n            self,\n            scene: str,\n            with_duplicates: bool = False\n    ) -> None:\n        \"\"\"\n        Drop a scene from the data table.\n        If the duplicates table contains a matching entry, it will be moved to the data table.\n\n        Parameters\n        ----------\n        scene:\n            a SAR scene\n        with_duplicates:\n            True: delete matching entry in duplicates table\n            False: move matching entry from duplicates into data table\n        \"\"\"\n        # save outname_base from to be deleted entry\n        search = self.data_schema.select().where(self.data_schema.c.scene == scene)\n        entry_data_outname_base = []\n        with self.engine.begin() as conn:\n            for rowproxy in conn.execute(search):\n                entry_data_outname_base.append((rowproxy[12]))\n        # log.info(entry_data_outname_base)\n        \n        # delete entry in data table\n        delete_statement = self.data_schema.delete().where(self.data_schema.c.scene == scene)\n        with self.engine.begin() as conn:\n            conn.execute(delete_statement)\n        \n        return_sentence = 'Entry with scene-id: \\n{} \\nwas dropped from data'.format(scene)\n        \n        # with_duplicates == True, delete 
entry from duplicates\n        if with_duplicates:\n            delete_statement_dup = self.duplicates_schema.delete().where(\n                self.duplicates_schema.c.outname_base == entry_data_outname_base[0])\n            with self.engine.begin() as conn:\n                conn.execute(delete_statement_dup)\n            \n            log.info(return_sentence + ' and duplicates!'.format(scene))\n            return\n        \n        # else select scene info matching outname_base from duplicates\n        select_in_duplicates_statement = self.duplicates_schema.select().where(\n            self.duplicates_schema.c.outname_base == entry_data_outname_base[0])\n        entry_duplicates_scene = []\n        with self.engine.begin() as conn:\n            for rowproxy in conn.execute(select_in_duplicates_statement):\n                entry_duplicates_scene.append((rowproxy[1]))\n        \n        # check if there is a duplicate\n        if len(entry_duplicates_scene) == 1:\n            # remove entry from duplicates\n            delete_statement_dup = self.duplicates_schema.delete().where(\n                self.duplicates_schema.c.outname_base == entry_data_outname_base[0])\n            with self.engine.begin() as conn:\n                conn.execute(delete_statement_dup)\n            \n            # insert scene from duplicates into data\n            self.insert(entry_duplicates_scene[0])\n            \n            return_sentence += ' and entry with outname_base \\n{} \\nand scene \\n{} \\n' \\\n                               'was moved from duplicates into data table'.format(\n                entry_data_outname_base[0], entry_duplicates_scene[0])\n        \n        log.info(return_sentence + '!')\n    \n    def drop_table(self, table: str) -> None:\n        \"\"\"\n        Drop a table from the database.\n\n        Parameters\n        ----------\n        table:\n            the table name\n        \"\"\"\n        if table in self.get_tablenames(return_all=True):\n          
  # this removes the idx tables and entries in geometry_columns for sqlite databases\n            if self.driver == 'sqlite':\n                with self.engine.begin() as conn:\n                    query = \"SELECT f_table_name FROM geometry_columns\"\n                    tab_with_geom = [rowproxy[0] for rowproxy\n                                     in conn.exec_driver_sql(query)]\n                    if table in tab_with_geom:\n                        conn.exec_driver_sql(\"SELECT DropGeoTable('\" + table + \"')\")\n            else:\n                table_info = Table(table, self.meta, autoload=True, autoload_with=self.engine)\n                table_info.drop(self.engine)\n            log.info('table {} dropped from database.'.format(table))\n        else:\n            raise ValueError(\"table {} is not registered in the database!\".format(table))\n        self.Base = automap_base(metadata=self.meta)\n        self.Base.prepare(self.engine, reflect=True)\n    \n    @staticmethod\n    def __is_open(ip: str, port: str | int) -> bool:\n        \"\"\"\n        Checks server connection, from Ben Curtis (github: Fmstrat)\n\n        Parameters\n        ----------\n        ip:\n            ip of the server\n        port:\n            port of the server\n\n        Returns\n        -------\n            is the server reachable?\n\n        \"\"\"\n        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n        s.settimeout(3)\n        try:\n            s.connect((ip, int(port)))\n            s.shutdown(socket.SHUT_RDWR)\n            return True\n        except:\n            return False\n        finally:\n            s.close()\n    \n    def __check_host(self, ip: str, port: str | int) -> bool:\n        \"\"\"\n        Calls __is_open() on ip and port, from Ben Curtis (github: Fmstrat)\n\n        Parameters\n        ----------\n        ip:\n            ip of the server\n        port:\n            port of the server\n\n        Returns\n        -------\n            is 
the server reachable?\n        \"\"\"\n        ipup = False\n        for i in range(2):\n            if self.__is_open(ip, port):\n                ipup = True\n                break\n            else:\n                time.sleep(5)\n        return ipup\n\n\ndef drop_archive(archive: Archive) -> None:\n    \"\"\"\n    drop (delete) a scene database\n\n    Parameters\n    ----------\n    archive:\n        the database to be deleted\n\n    See Also\n    --------\n    :func:`sqlalchemy_utils.functions.drop_database()`\n\n    Examples\n    --------\n    >>> pguser = os.environ.get('PGUSER')\n    >>> pgpassword = os.environ.get('PGPASSWORD')\n\n    >>> db = Archive('test', postgres=True, port=5432, user=pguser, password=pgpassword)\n    >>> drop_archive(db)\n    \"\"\"\n    if archive.driver == 'postgresql':\n        url = archive.url\n        archive.close()\n        drop_database(url)\n    else:\n        raise RuntimeError('this function only works for PostgreSQL databases. '\n                           'For SQLite databases it is recommended to just delete the DB file.')\n"
  },
  {
    "path": "pyroSAR/auxdata.py",
    "content": "###############################################################################\n# tools for handling auxiliary data in software pyroSAR\n\n# Copyright (c) 2019-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nimport os\nimport re\nimport csv\nimport ssl\nimport json\nimport numpy\nimport fnmatch\nimport ftplib\nimport requests\nimport zipfile as zf\nfrom lxml import etree\nfrom math import ceil, floor\nfrom urllib.parse import urlparse\nfrom collections import defaultdict\nfrom packaging import version\nfrom pyroSAR.examine import ExamineSnap\nfrom pyroSAR.ancillary import Lock\nfrom spatialist.raster import Raster, Dtype\nfrom spatialist.vector import bbox\nfrom spatialist.ancillary import dissolve, finder\nfrom spatialist.auxil import gdalbuildvrt, crsConvert, gdalwarp\nfrom spatialist.envi import HDRobject\nfrom osgeo import gdal\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef dem_autoload(geometries, demType, vrt=None, buffer=None, username=None,\n                 password=None, product='dem', crop=True, lock_timeout=600,\n                 offline=False):\n    \"\"\"\n    obtain all relevant DEM tiles for selected geometries and optionally mosaic them in a VRT.\n\n    Parameters\n    ----------\n    geometries: list[spatialist.vector.Vector] or None\n        a list of :class:`spatialist.vector.Vector` geometries to obtain DEM data for;\n        CRS must be WGS84 LatLon (EPSG 4326). 
Can be set to None for global extent.\n    demType: str\n        the type of DEM to be used; current options:\n\n        - 'AW3D30' (ALOS Global Digital Surface Model \"ALOS World 3D - 30m\")\n\n          * info: https://www.eorc.jaxa.jp/ALOS/en/aw3d30/index.htm\n          * url: ftp://ftp.eorc.jaxa.jp/pub/ALOS/ext1/AW3D30/release_v1804\n          * height reference: EGM96\n\n        - 'Copernicus 10m EEA DEM' (Copernicus 10 m DEM available over EEA-39 countries)\n\n          * registration: https://spacedata.copernicus.eu/web/cscda/data-access/registration\n          * url: ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_EEA-10-DGED/2021_1\n          * height reference: EGM2008\n\n        - 'Copernicus 30m Global DEM'\n          \n          * info: https://registry.opendata.aws/copernicus-dem\n          * url: https://copernicus-dem-30m-stac.s3.amazonaws.com\n          * height reference: EGM2008\n\n        - 'Copernicus 30m Global DEM II'\n        \n          * registration: https://spacedata.copernicus.eu/web/cscda/data-access/registration\n          * url: ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-30-DGED/2021_1\n          * height reference: EGM2008\n        \n        - 'Copernicus 90m Global DEM'\n     \n          * info: https://registry.opendata.aws/copernicus-dem\n          * url: https://copernicus-dem-90m-stac.s3.amazonaws.com\n          * height reference: EGM2008\n        \n        - 'Copernicus 90m Global DEM II'\n        \n          * registration: https://spacedata.copernicus.eu/web/cscda/data-access/registration\n          * url: ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-90-DGED/2021_1\n          * height reference: EGM2008\n        \n        - 'GETASSE30'\n        \n          * info: https://seadas.gsfc.nasa.gov/help-8.1.0/desktop/GETASSE30ElevationModel.html\n          * url: https://step.esa.int/auxdata/dem/GETASSE30\n          * height reference: WGS84\n        \n        - 'SRTM 1Sec HGT'\n\n          * url: 
https://step.esa.int/auxdata/dem/SRTMGL1\n          * height reference: EGM96\n\n        - 'SRTM 3Sec'\n\n          * url: https://step.esa.int/auxdata/dem/SRTM90/tiff\n          * height reference: EGM96\n\n    vrt: str or None\n        an optional GDAL VRT file created from the obtained DEM tiles\n    buffer: int, float, None\n        a buffer in degrees to add around the individual geometries\n    username: str or None\n        (optional) the username for services requiring registration\n    password: str or None\n        (optional) the password for the registration account\n    product: str\n        the sub-product to extract from the DEM product.\n        The following options are available for the respective DEM types:\n        \n        - 'AW3D30'\n        \n          * 'dem': the actual Digital Elevation Model\n          * 'msk': mask information for each pixel (Cloud/Snow Mask, Land water and\n            low correlation mask, Sea mask, Information of elevation dataset used\n            for the void-filling processing)\n          * 'stk': number of DSM-scene files which were used to produce the 5 m resolution DSM\n        \n        - 'Copernicus 10m EEA DEM'\n        \n          * 'dem': the actual Digital Elevation Model\n          * 'edm': editing mask\n          * 'flm': filling mask\n          * 'hem': height error mask\n          * 'wbm': water body mask\n        \n        - 'Copernicus 30m Global DEM'\n        \n          * 'dem': the actual Digital Elevation Model\n          * 'edm': Editing Mask\n          * 'flm': Filling Mask\n          * 'hem': Height Error Mask\n          * 'wbm': Water Body Mask\n        \n        - 'Copernicus 30m Global DEM II'\n        \n          * 'dem': the actual Digital Elevation Model\n          * 'edm': editing mask\n          * 'flm': filling mask\n          * 'hem': height error mask\n          * 'wbm': water body mask\n        \n        - 'Copernicus 90m Global DEM'\n        \n          * 'dem': the actual Digital 
Elevation Model\n          * 'edm': Editing Mask\n          * 'flm': Filling Mask\n          * 'hem': Height Error Mask\n          * 'wbm': Water Body Mask\n        \n        - 'Copernicus 90m Global DEM II'\n        \n          * 'dem': the actual Digital Elevation Model\n          * 'edm': editing mask\n          * 'flm': filling mask\n          * 'hem': height error mask\n          * 'wbm': water body mask\n        \n        - 'GETASSE30'\n        \n          * 'dem': the actual Digital Elevation Model\n        \n        - 'SRTM 1Sec HGT'\n        \n          * 'dem': the actual Digital Elevation Model\n        \n        - 'SRTM 3Sec'\n        \n          * 'dem': the actual Digital Elevation Model\n    \n    crop: bool\n        crop to the provided geometries (or return the full extent of the DEM tiles)?\n    lock_timeout: int\n        how long to wait to acquire a lock on the downloaded files?\n    offline: bool\n        work offline? If `True`, only locally existing files are considered\n        and no online check is performed. If a file is missing, an error is\n        raised. For this to work, the function needs to be run in `online`\n        mode once to create a local index.\n    \n    Returns\n    -------\n    list[str] or None\n        the names of the obtained files or None if a VRT file was defined\n    \n    Examples\n    --------\n    download all SRTM 1 arcsec DEMs overlapping with a Sentinel-1 scene and mosaic them to a single GeoTIFF file\n    \n    .. 
code-block:: python\n        \n        from pyroSAR import identify\n        from pyroSAR.auxdata import dem_autoload\n        from spatialist import gdalwarp\n        \n        # identify the SAR scene\n        filename = 'S1A_IW_SLC__1SDV_20150330T170734_20150330T170801_005264_006A6C_DA69.zip'\n        scene = identify(filename)\n        \n        # extract the bounding box as spatialist.Vector object\n        bbox = scene.bbox()\n        \n        # download the tiles and virtually combine them in an in-memory\n        # VRT file subsetted to the extent of the SAR scene plus a buffer of 0.01 degrees\n        vrt = '/vsimem/srtm1.vrt'\n        dem_autoload(geometries=[bbox], demType='SRTM 1Sec HGT',\n                     vrt=vrt, buffer=0.01)\n        \n        # write the final GeoTIFF file\n        outname = scene.outname_base() + 'srtm1.tif'\n        gdalwarp(src=vrt, dst=outname, options={'format': 'GTiff'})\n        \n        # alternatively use function dem_create and warp the DEM to UTM\n        # including conversion from geoid to ellipsoid heights\n        from pyroSAR.auxdata import dem_create\n        outname = scene.outname_base() + 'srtm1_ellp.tif'\n        dem_create(src=vrt, dst=outname, t_srs=32632, tr=(30, 30),\n                   geoid_convert=True, geoid='EGM96')\n    \"\"\"\n    with DEMHandler(geometries) as handler:\n        return handler.load(dem_type=demType,\n                            username=username,\n                            password=password,\n                            vrt=vrt,\n                            buffer=buffer,\n                            product=product,\n                            crop=crop,\n                            lock_timeout=lock_timeout,\n                            offline=offline)\n\n\ndef dem_create(src, dst, t_srs=None, tr=None, threads=None,\n               geoid_convert=False, geoid='EGM96', nodata=None,\n               resampleAlg='bilinear', dtype=None, pbar=False,\n               **kwargs):\n   
 \"\"\"\n    Create a new DEM GeoTIFF file and optionally convert heights from geoid to ellipsoid.\n    This is basically a convenience wrapper around :func:`osgeo.gdal.Warp` via :func:`spatialist.auxil.gdalwarp`.\n    The following argument defaults deviate from those of :func:`osgeo.gdal.WarpOptions`:\n    \n    - `format` is set to 'GTiff'\n    - `resampleAlg` is set to 'bilinear'\n    - `targetAlignedPixels` is set to 'True'\n    \n    \n    Parameters\n    ----------\n    src: str\n        the input dataset, e.g. a VRT from function :func:`dem_autoload`\n    dst: str\n        the output dataset\n    t_srs: None, int, str or osgeo.osr.SpatialReference\n        A target geographic reference system in WKT, EPSG, PROJ4 or OPENGIS format.\n        See function :func:`spatialist.auxil.crsConvert()` for details.\n        Default (None): use the crs of ``src``.\n    tr: None or tuple[int or float]\n        the target resolution as (xres, yres)\n    threads: int, str or None\n        the number of threads to use. Possible values:\n        \n         - Default `None`: use the value of `GDAL_NUM_THREADS` without modification. 
If `GDAL_NUM_THREADS` is None,\n           multi-threading is still turned on and two threads are used, one for I/O and one for computation.\n         - integer value: temporarily modify `GDAL_NUM_THREADS` and reset it once done.\n           If 1, multithreading is turned off.\n         - `ALL_CPUS`: special string to use all cores/CPUs of the computer; will also temporarily\n           modify `GDAL_NUM_THREADS`.\n    geoid_convert: bool\n        convert geoid heights?\n    geoid: str\n        the geoid model to be corrected, only used if ``geoid_convert == True``; current options:\n        \n         - 'EGM96'\n         - 'EGM2008'\n    nodata: int or float or str or None\n        the no data value of the source and destination files.\n        Can be used if no source nodata value can be read or to override it.\n        A special string 'None' can be used to skip reading the value from the source file.\n    resampleAlg: str\n        the resampling algorithm to be used. See here for options:\n        https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r\n    dtype: str or None\n        override the data type of the written file; Default None: use same type as source data.\n        Data type notations of GDAL (e.g. `Float32`) and numpy (e.g. `int8`) are supported.\n        See :class:`spatialist.raster.Dtype`.\n    pbar: bool\n        add a progressbar?\n    **kwargs\n        additional keyword arguments to be passed to :func:`spatialist.auxil.gdalwarp`.\n        See :func:`osgeo.gdal.WarpOptions` for options. 
The following arguments cannot\n        be set as they are controlled internally:\n        \n        - `xRes`, `yRes`: controlled via argument `tr`\n        - `srcSRS`, `dstSRS`: controlled via the CRS of `src` and arguments `t_srs`, `geoid`, `geoid_convert`\n        - `srcNodata`, `dstNodata`: controlled via argument `nodata`\n        - `outputType`: controlled via argument `dtype`\n        - `multithread` controlled via argument `threads`\n    \n    Returns\n    -------\n\n    \"\"\"\n    \n    vrt_check_sources(src)\n    \n    with Raster(src) as ras:\n        if nodata is None:\n            nodata = ras.nodata\n        if tr is None:\n            tr = ras.res\n        epsg_in = ras.epsg\n    \n    if t_srs is None:\n        epsg_out = epsg_in\n    else:\n        epsg_out = crsConvert(t_srs, 'epsg')\n    \n    threads_system = gdal.GetConfigOption('GDAL_NUM_THREADS')\n    if threads is None:\n        threads = threads_system\n        try:\n            threads = int(threads)\n        except (ValueError, TypeError):\n            pass\n    if isinstance(threads, str):\n        if threads != 'ALL_CPUS':\n            raise ValueError(\"unsupported value for 'threads': '{}'\".format(threads))\n        else:\n            multithread = True\n            gdal.SetConfigOption('GDAL_NUM_THREADS', threads)\n    elif isinstance(threads, int):\n        if threads == 1:\n            multithread = False\n        elif threads > 1:\n            multithread = True\n            gdal.SetConfigOption('GDAL_NUM_THREADS', str(threads))\n        else:\n            raise ValueError(\"if 'threads' is of type int, it must be >= 1\")\n    elif threads is None:\n        multithread = True\n    else:\n        raise TypeError(\"'threads' must be of type int, str or None. 
Is: {}\".format(type(threads)))\n    \n    gdalwarp_args = {'format': 'GTiff', 'multithread': multithread,\n                     'srcNodata': nodata, 'dstNodata': nodata,\n                     'srcSRS': 'EPSG:{}'.format(epsg_in),\n                     'dstSRS': 'EPSG:{}'.format(epsg_out),\n                     'resampleAlg': resampleAlg,\n                     'xRes': tr[0], 'yRes': tr[1],\n                     'targetAlignedPixels': True}\n    \n    if dtype is not None:\n        gdalwarp_args['outputType'] = Dtype(dtype).gdalint\n    \n    if geoid_convert:\n        geoid_epsg = {'EGM96': 5773,\n                      'EGM2008': 3855}\n        if geoid in geoid_epsg.keys():\n            epsg = geoid_epsg[geoid]\n            gdalwarp_args['srcSRS'] += '+{}'.format(epsg)\n            # the following line is a workaround for older GDAL versions that did not\n            # support compound EPSG codes. See https://github.com/OSGeo/gdal/pull/4639.\n            if version.parse(gdal.__version__) < version.parse('3.4.0'):\n                gdalwarp_args['srcSRS'] = crsConvert(gdalwarp_args['srcSRS'], 'proj4')\n        else:\n            raise RuntimeError('geoid model not yet supported')\n        try:\n            get_egm_lookup(geoid=geoid, software='PROJ')\n        except OSError as e:\n            errstr = str(e)\n            raise RuntimeError(errstr)\n    \n    locked = ['xRes', 'yRes', 'srcSRS', 'dstSRS', 'srcNodata',\n              'dstNodata', 'outputType', 'multithread']\n    for key, val in kwargs.items():\n        if key not in locked:\n            gdalwarp_args[key] = val\n        else:\n            msg = \"argument '{}' cannot be set via kwargs as it is set internally.\"\n            raise RuntimeError(msg.format(key))\n    try:\n        if not os.path.isfile(dst):\n            message = 'creating mosaic'\n            crs = gdalwarp_args['dstSRS']\n            if crs != 'EPSG:4326':\n                message += ' and reprojecting to {}'.format(crs)\n            
log.info(f'{message}: {dst}')\n            gdalwarp(src=src, dst=dst, pbar=pbar, **gdalwarp_args)\n        else:\n            log.info(f'mosaic already exists: {dst}')\n    except Exception:\n        if os.path.isfile(dst):\n            os.remove(dst)\n        raise\n    finally:\n        gdal.SetConfigOption('GDAL_NUM_THREADS', threads_system)\n\n\nclass DEMHandler:\n    \"\"\"\n    An interface to obtain DEM data for selected geometries.\n    The files are downloaded into the ESA SNAP auxiliary data directory structure.\n    This class is the foundation for the convenience function :func:`~pyroSAR.auxdata.dem_autoload`.\n    \n    Parameters\n    ----------\n    geometries: list[spatialist.vector.Vector] or None\n        a list of geometries\n    \"\"\"\n    \n    def __init__(self, geometries):\n        if not (isinstance(geometries, list) or geometries is None):\n            raise RuntimeError('geometries must be of type list')\n        \n        if geometries is not None:\n            for geometry in geometries:\n                if geometry.getProjection('epsg') != 4326:\n                    raise RuntimeError('input geometry CRS must be WGS84 LatLon (EPSG 4326)')\n        self.geometries = geometries\n        try:\n            self.auxdatapath = ExamineSnap().auxdatapath\n        except AttributeError:\n            self.auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')\n    \n    def __enter__(self):\n        return self\n    \n    def __exit__(self, exc_type, exc_val, exc_tb):\n        return\n    \n    @staticmethod\n    def __applybuffer(extent, buffer):\n        ext = dict(extent)\n        if buffer is not None:\n            ext['xmin'] -= buffer\n            ext['xmax'] += buffer\n            ext['ymin'] -= buffer\n            ext['ymax'] += buffer\n        return ext\n    \n    def __find_first(self, dem_type, product):\n        outdir = os.path.join(self.auxdatapath, 'dem', dem_type)\n        vsi = self.config[dem_type]['vsi']\n   
     pattern = fnmatch.translate(self.config[dem_type]['pattern'][product])\n        for root, dirs, files in os.walk(outdir):\n            for file in files:\n                if vsi is None:\n                    if re.search(pattern, file):\n                        return os.path.join(root, file)\n                else:\n                    if re.search(r'\\.(?:zip|tar(\\.gz)?)$', file):\n                        fname = os.path.join(root, file)\n                        content = finder(fname, [pattern], regex=True)\n                        if len(content) > 0:\n                            if dem_type == 'GETASSE30':\n                                getasse30_hdr(fname)\n                            return vsi + content[0]\n    \n    @staticmethod\n    def __buildvrt(tiles, vrtfile, pattern, vsi, extent, src_nodata=None,\n                   dst_nodata=None, hide_nodata=False, resolution=None,\n                   tap=True, dst_datatype=None):\n        \"\"\"\n        Build a VRT mosaic from DEM tiles. The VRT is cropped to the specified `extent` but the pixel grid\n        of the source files is preserved and no resampling/shifting is applied.\n        \n        Parameters\n        ----------\n        tiles: list[str]\n            a list of DEM files or compressed archives containing DEM files\n        vrtfile: str\n            the output VRT filename\n        pattern: str\n            the search pattern for finding DEM tiles in compressed archives\n        vsi: str or None\n            the GDAL VSI directive to prepend the DEM tile name, e.g. 
/vsizip/ or /vsitar/\n        extent: dict\n            a dictionary with keys `xmin`, `ymin`, `xmax` and `ymax`\n        src_nodata: int or float or None\n            the nodata value of the source DEM tiles; default None: read the value from the first item in `tiles`\n        dst_nodata: int or float or None\n            the nodata value of the output VRT file.\n            Default None: do not define a nodata value and use `src_nodata` instead.\n        hide_nodata: bool\n            hide the nodata value of the output VRT file?\n        resolution: tuple[int or float] or None\n            the spatial resolution (X, Y) of the source DEM tiles.\n            Default None: read the value from the first item in `tiles`\n        tap: bool\n            align target pixels?\n        dst_datatype: int or str or None\n            the VRT data type as supported by :class:`spatialist.raster.Dtype`.\n            Default None: use the same data type as the source files.\n        \n        Returns\n        -------\n\n        \"\"\"\n        if vsi is not None and not tiles[0].endswith('.tif'):\n            locals = [vsi + x for x in dissolve([finder(x, [pattern]) for x in tiles])]\n        else:\n            locals = tiles\n        with Raster(locals[0]) as ras:\n            if src_nodata is None:\n                src_nodata = ras.nodata\n            if resolution is None:\n                xres, yres = ras.res\n            else:\n                xres, yres = resolution\n        opts = {'srcNodata': src_nodata,\n                'targetAlignedPixels': tap,\n                'xRes': xres, 'yRes': yres, 'hideNodata': hide_nodata\n                }\n        if dst_nodata is not None:\n            opts['VRTNodata'] = dst_nodata\n        opts['outputBounds'] = (extent['xmin'], extent['ymin'],\n                                extent['xmax'], extent['ymax'])\n        \n        gdalbuildvrt(src=locals, dst=vrtfile, **opts)\n        if dst_datatype is not None:\n            datatype = 
Dtype(dst_datatype).gdalstr\n            tree = etree.parse(source=vrtfile)\n            band = tree.find(path='VRTRasterBand')\n            band.attrib['dataType'] = datatype\n            tree.write(file=vrtfile, pretty_print=True,\n                       xml_declaration=False, encoding='utf-8')\n    \n    def __commonextent(self, buffer=None):\n        \"\"\"\n        \n        Parameters\n        ----------\n        buffer: int or float or None\n\n        Returns\n        -------\n        dict\n        \"\"\"\n        ext_new = {}\n        for geo in self.geometries:\n            if len(ext_new.keys()) == 0:\n                ext_new = geo.extent\n            else:\n                for key in ['xmin', 'ymin']:\n                    if geo.extent[key] > ext_new[key]:\n                        ext_new[key] = geo.extent[key]\n                for key in ['xmax', 'ymax']:\n                    if geo.extent[key] < ext_new[key]:\n                        ext_new[key] = geo.extent[key]\n        ext_new = self.__applybuffer(ext_new, buffer)\n        return ext_new\n    \n    @staticmethod\n    def __create_dummy_dem(filename, extent):\n        \"\"\"\n        Create a dummy file which spans the given extent and\n        is 1x1 pixels large to be as small as possible.\n        This file is used to create dummy DEMs over ocean.\n        \"\"\"\n        driver = gdal.GetDriverByName('GTiff')\n        dataset = driver.Create(filename, 1, 1, 1, 1)\n        geo = [\n            extent['xmin'],\n            extent['xmax'] - extent['xmin'],\n            0,\n            extent['ymax'],\n            0,\n            extent['ymin'] - extent['ymax']  # negative\n        ]\n        dataset.SetGeoTransform(geo)\n        dataset.SetProjection('EPSG:4326')\n        band = dataset.GetRasterBand(1)\n        band.SetNoDataValue(255)\n        mat = numpy.zeros(shape=(1, 1))\n        band.WriteArray(mat, 0, 0)\n        band.FlushCache()\n        del mat\n        band = None\n        dataset = 
None\n        driver = None\n    \n    @staticmethod\n    def intrange(extent, step):\n        \"\"\"\n        generate a sequence of integer coordinates marking\n        the tie points of the individual DEM tiles.\n        \n        Parameters\n        ----------\n        extent: dict or None\n            a dictionary with keys `xmin`, `xmax`, `ymin` and `ymax`\n            with coordinates in EPSG:4326 or None to use a global extent.\n        step: int\n            the sequence steps\n\n        Returns\n        -------\n        tuple[range]\n            the integer sequences as (latitude, longitude)\n        \"\"\"\n        if extent is None:\n            lat = range(-90, 90)\n            lon = range(-180, 180)\n        else:\n            lat = range(floor(float(extent['ymin']) / step) * step,\n                        ceil(float(extent['ymax']) / step) * step,\n                        step)\n            lon = range(floor(float(extent['xmin']) / step) * step,\n                        ceil(float(extent['xmax']) / step) * step,\n                        step)\n        return lat, lon\n    \n    def __get_resolution(self, dem_type, y):\n        \"\"\"\n        \n        Parameters\n        ----------\n        dem_type: str\n            the DEM type\n        y: int or float\n            the latitude for which to get the resolution\n\n        Returns\n        -------\n        tuple\n            (xres, yres)\n        \"\"\"\n        for key, val in self.config[dem_type]['resolution'].items():\n            ymin, ymax = [int(y) for y in key.split('-')]\n            if ymin <= abs(y) <= ymax:\n                return val\n    \n    def __local_index(self, dem_type):\n        path = os.path.join(self.auxdatapath, 'dem', dem_type, 'index.json')\n        os.makedirs(os.path.dirname(path), exist_ok=True)\n        if not os.path.isfile(path):\n            with Lock(str(path)):\n                if dem_type in ['Copernicus 30m Global DEM',\n                                
'Copernicus 90m Global DEM']:\n                    log.debug(f\"building local index for DEM type '{dem_type}'\")\n                    res = re.search('[39]0', dem_type).group()\n                    catalog_json = f\"dem_cop_{res}.json\"\n                    URL_STAC = self.config[dem_type]['url']\n                    marker = None\n                    out = defaultdict(defaultdict)\n                    while True:\n                        params = {}\n                        if marker:\n                            params[\"marker\"] = marker\n                        r = requests.get(URL_STAC, params=params)\n                        root = etree.fromstring(r.content)\n                        is_truncated = root.find(path=\"./IsTruncated\",\n                                                 namespaces=root.nsmap).text == \"true\"\n                        items = [x.text for x in root.findall(path=\"./Contents/Key\",\n                                                              namespaces=root.nsmap)]\n                        if marker is None:\n                            del items[items.index(catalog_json)]\n                        marker = items[-1]\n                        items = sorted([URL_STAC + '/' + x for x in items])\n                        URL = None\n                        for item in items:\n                            if URL is None:\n                                content = requests.get(item).json()\n                                href = content['assets']['elevation']['href']\n                                URL = 'https://' + urlparse(href).netloc\n                            base = os.path.basename(item).replace('.json', '')\n                            lat = re.search('[NS][0-9]{2}', base).group()\n                            lon = re.search('[EW][0-9]{3}', base).group()\n                            prefix = f\"{URL}/{base}_DEM\"\n                            sub = {\n                                \"dem\": f\"{prefix}/{base}_DEM.tif\",\n       
                         \"edm\": f\"{prefix}/AUXFILES/{base}_EDM.tif\",\n                                \"flm\": f\"{prefix}/AUXFILES/{base}_FLM.tif\",\n                                \"wbm\": f\"{prefix}/AUXFILES/{base}_WBM.tif\",\n                                \"hem\": f\"{prefix}/AUXFILES/{base}_HEM.tif\"\n                            }\n                            out[lat][lon] = sub\n                        if not is_truncated:\n                            break\n                elif dem_type in ['GETASSE30', 'SRTM 1Sec HGT', 'SRTM 3Sec']:\n                    url = self.config[dem_type]['url']\n                    response = requests.get(url)\n                    response.raise_for_status()\n                    items = re.findall(r'href=\"([^\"]+)\"', response.text)\n                    out = defaultdict(lambda: defaultdict(dict))\n                    patterns = {\n                        'GETASSE30': '(?P<lat>[0-9]{2}[NS])(?P<lon>[0-9]{3}[EW])',\n                        'SRTM 1Sec HGT': '(?P<lat>[NS][0-9]{2})(?P<lon>[EW][0-9]{3})',\n                        'SRTM 3Sec': '(?P<lon>[0-9]{2})_(?P<lat>[0-9]{2})'\n                    }\n                    for item in items:\n                        if item == '../':\n                            continue\n                        link = url.rstrip('/') + '/' + item\n                        coord = re.search(patterns[dem_type], item).groupdict()\n                        out[coord['lat']][coord['lon']] = {'dem': link}\n                else:\n                    raise RuntimeError(f\"local indexing is not supported \"\n                                       f\"for DEM type {dem_type}\")\n                with open(path, 'w') as f:\n                    json.dump(out, f, indent=4)\n        with Lock(str(path), soft=True):\n            with open(path, 'r') as f:\n                index = json.load(f)\n        return index\n    \n    @staticmethod\n    def __retrieve(\n            urls: list[str],\n            outdir: 
str,\n            offline: bool = False,\n            lock_timeout: int = 600\n    ) -> list[str]:\n        if len(urls) == 0:\n            return []\n        # check that base URL is reachable\n        if not offline:\n            url_parse = urlparse(urls[0])\n            url_base = url_parse.scheme + '://' + url_parse.netloc\n            r = requests.get(url_base)\n            r.raise_for_status()\n            r.close()\n        \n        urls = list(set(urls))\n        os.makedirs(outdir, exist_ok=True)\n        locals = []\n        n = len(urls)\n        for i, remote in enumerate(urls):\n            local = os.path.join(outdir, os.path.basename(remote))\n            if not os.path.isfile(local):\n                if offline:\n                    raise RuntimeError(f'file not found locally: {local}')\n                else:\n                    with Lock(local, timeout=lock_timeout):\n                        r = requests.get(remote)\n                        # a tile might not exist over the ocean\n                        if r.status_code == 404:\n                            r.close()\n                            continue\n                        msg = '[{i: >{w}}/{n}] {l} <<-- {r}'\n                        log.info(msg.format(i=i + 1, w=len(str(n)),\n                                            n=n, l=local, r=remote))\n                        r.raise_for_status()\n                        with open(local, 'wb') as output:\n                            output.write(r.content)\n                        r.close()\n            else:\n                msg = '[{i: >{w}}/{n}] found local file: {l}'\n                log.info(msg.format(i=i + 1, w=len(str(n)), n=n, l=local))\n            if os.path.isfile(local):\n                locals.append(local)\n        return sorted(locals)\n    \n    @staticmethod\n    def __retrieve_ftp(url, filenames, outdir, username, password,\n                       port=0, offline=False, lock_timeout=600):\n        files = 
list(set(filenames))\n        os.makedirs(outdir, exist_ok=True)\n        \n        parsed = urlparse(url)\n        timeout = 100\n        if not offline:\n            if parsed.scheme == 'ftpes':\n                ftp = ftplib.FTP_TLS(host=parsed.netloc, timeout=timeout)\n                try:\n                    ftp.login(username, password)  # login anonymously before securing control channel\n                except ftplib.error_perm as e:\n                    raise RuntimeError(str(e))\n                ftp.prot_p()  # switch to secure data connection.. IMPORTANT! Otherwise, only the user and password is encrypted and not all the file data.\n            elif parsed.scheme == 'ftps':\n                ftp = ImplicitFTP_TLS()\n                ftp.connect(host=parsed.netloc, timeout=timeout, port=port)\n                ftp.login(username, password)\n            else:\n                ftp = ftplib.FTP(host=parsed.netloc, timeout=timeout)\n                ftp.login()\n            if parsed.path != '':\n                ftp.cwd(parsed.path)\n        else:\n            ftp = None\n        locals = []\n        n = len(files)\n        for i, remote in enumerate(files):\n            local = os.path.join(outdir, os.path.basename(remote))\n            with Lock(local, timeout=lock_timeout):\n                if not os.path.isfile(local) and not offline:\n                    try:\n                        targetlist = ftp.nlst(remote)\n                    except ftplib.error_temp:\n                        continue\n                    address = '{}://{}/{}{}'.format(parsed.scheme, parsed.netloc,\n                                                    parsed.path + '/' if parsed.path != '' else '',\n                                                    remote)\n                    msg = '[{i: >{w}}/{n}] {l} <<-- {r}'\n                    log.info(msg.format(i=i + 1, w=len(str(n)), n=n, l=local, r=address))\n                    with open(local, 'wb') as myfile:\n                        
ftp.retrbinary('RETR {}'.format(remote), myfile.write)\n                else:\n                    msg = '[{i: >{w}}/{n}] found local file: {l}'\n                    log.info(msg.format(i=i + 1, w=len(str(n)), n=n, l=local))\n            if os.path.isfile(local):\n                locals.append(local)\n        if ftp is not None:\n            ftp.close()\n        return sorted(locals)\n    \n    @property\n    def config(self):\n        return {\n            'AW3D30': {'url': 'ftp://ftp.eorc.jaxa.jp/pub/ALOS/ext1/AW3D30/release_v1804',\n                       'nodata': {'dem': -9999,\n                                  'msk': 3,\n                                  'stk': 0},\n                       'resolution': {'0-90': (1 / 3600, 1 / 3600)},\n                       'tilesize': 1,\n                       'area_or_point': 'area',\n                       'vsi': '/vsitar/',\n                       'pattern': {'dem': '*DSM.tif',\n                                   'msk': '*MSK.tif',\n                                   'stk': '*STK.tif'},\n                       'datatype': {'dem': 'Int16',\n                                    'msk': 'Byte',\n                                    'stk': 'Byte'},\n                       'authentication': False\n                       },\n            'Copernicus 10m EEA DEM': {'url': 'ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_EEA-10-DGED/2021_1',\n                                       'nodata': {'dem': -32767.0,\n                                                  'edm': 8,\n                                                  'flm': 1,\n                                                  'hem': -32767.0,\n                                                  'wbm': 1},\n                                       'resolution': {'0-50': (1 / 9000, 1 / 9000),\n                                                      '50-60': (1.5 / 9000, 1 / 9000),\n                                                      '60-70': (2 / 9000, 1 / 9000),\n                    
                                  '70-80': (3 / 9000, 1 / 9000),\n                                                      '80-85': (5 / 9000, 1 / 9000),\n                                                      '85-90': (10 / 9000, 1 / 9000)},\n                                       'tilesize': 1,\n                                       'area_or_point': 'point',\n                                       'vsi': '/vsitar/',\n                                       'port': 990,\n                                       'pattern': {'dem': '*DEM.tif',\n                                                   'edm': '*EDM.tif',\n                                                   'flm': '*FLM.tif',\n                                                   'hem': '*HEM.tif',\n                                                   'wbm': '*WBM.tif'},\n                                       'datatype': {'dem': 'Float32',\n                                                    'edm': 'Byte',\n                                                    'flm': 'Byte',\n                                                    'hem': 'Float32',\n                                                    'wbm': 'Byte'},\n                                       'authentication': True\n                                       },\n            'Copernicus 30m Global DEM': {'url': 'https://copernicus-dem-30m-stac.s3.amazonaws.com',\n                                          'nodata': {'dem': -32767.0,\n                                                     'edm': 8,\n                                                     'flm': 1,\n                                                     'hem': -32767.0,\n                                                     'wbm': 1},\n                                          'resolution': {'0-50': (1 / 3600, 1 / 3600),\n                                                         '50-60': (1.5 / 3600, 1 / 3600),\n                                                         '60-70': (2 / 3600, 1 / 3600),\n             
                                            '70-80': (3 / 3600, 1 / 3600),\n                                                         '80-85': (5 / 3600, 1 / 3600),\n                                                         '85-90': (10 / 3600, 1 / 3600)},\n                                          'tilesize': 1,\n                                          'area_or_point': 'point',\n                                          'vsi': None,\n                                          'pattern': {'dem': '*DEM.tif',\n                                                      'edm': '*EDM.tif',\n                                                      'flm': '*FLM.tif',\n                                                      'hem': '*HEM.tif',\n                                                      'wbm': '*WBM.tif'},\n                                          'datatype': {'dem': 'Float32',\n                                                       'edm': 'Byte',\n                                                       'flm': 'Byte',\n                                                       'hem': 'Float32',\n                                                       'wbm': 'Byte'},\n                                          'authentication': False\n                                          },\n            'Copernicus 30m Global DEM II': {\n                'url': 'ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-30-DGED/2021_1',\n                'nodata': {'dem': -32767.0,\n                           'edm': 8,\n                           'flm': 1,\n                           'hem': -32767.0,\n                           'wbm': 1},\n                'resolution': {'0-50': (1 / 3600, 1 / 3600),\n                               '50-60': (1.5 / 3600, 1 / 3600),\n                               '60-70': (2 / 3600, 1 / 3600),\n                               '70-80': (3 / 3600, 1 / 3600),\n                               '80-85': (5 / 3600, 1 / 3600),\n                               '85-90': (10 / 
3600, 1 / 3600)},\n                'tilesize': 1,\n                'area_or_point': 'point',\n                'vsi': '/vsitar/',\n                'port': 990,\n                'pattern': {'dem': '*DEM.tif',\n                            'edm': '*EDM.tif',\n                            'flm': '*FLM.tif',\n                            'hem': '*HEM.tif',\n                            'wbm': '*WBM.tif'},\n                'datatype': {'dem': 'Float32',\n                             'edm': 'Byte',\n                             'flm': 'Byte',\n                             'hem': 'Float32',\n                             'wbm': 'Byte'},\n                'authentication': True\n            },\n            'Copernicus 90m Global DEM': {'url': 'https://copernicus-dem-90m-stac.s3.amazonaws.com',\n                                          'nodata': {'dem': -32767.0,\n                                                     'edm': 8,\n                                                     'flm': 1,\n                                                     'hem': -32767.0,\n                                                     'wbm': 1},\n                                          'resolution': {'0-50': (1 / 1200, 1 / 1200),\n                                                         '50-60': (1.5 / 1200, 1 / 1200),\n                                                         '60-70': (2 / 1200, 1 / 1200),\n                                                         '70-80': (3 / 1200, 1 / 1200),\n                                                         '80-85': (5 / 1200, 1 / 1200),\n                                                         '85-90': (10 / 1200, 1 / 1200)},\n                                          'tilesize': 1,\n                                          'area_or_point': 'point',\n                                          'vsi': None,\n                                          'pattern': {'dem': '*DEM.tif',\n                                                      'edm': '*EDM.tif',\n      
                                                'flm': '*FLM.tif',\n                                                      'hem': '*HEM.tif',\n                                                      'wbm': '*WBM.tif'},\n                                          'datatype': {'dem': 'Float32',\n                                                       'edm': 'Byte',\n                                                       'flm': 'Byte',\n                                                       'hem': 'Float32',\n                                                       'wbm': 'Byte'},\n                                          'authentication': False\n                                          },\n            'Copernicus 90m Global DEM II': {\n                'url': 'ftps://cdsdata.copernicus.eu/DEM-datasets/COP-DEM_GLO-90-DGED/2021_1',\n                'nodata': {'dem': -32767.0,\n                           'edm': 8,\n                           'flm': 1,\n                           'hem': -32767.0,\n                           'wbm': 1},\n                'resolution': {'0-50': (1 / 1200, 1 / 1200),\n                               '50-60': (1.5 / 1200, 1 / 1200),\n                               '60-70': (2 / 1200, 1 / 1200),\n                               '70-80': (3 / 1200, 1 / 1200),\n                               '80-85': (5 / 1200, 1 / 1200),\n                               '85-90': (10 / 1200, 1 / 1200)},\n                'tilesize': 1,\n                'area_or_point': 'point',\n                'vsi': '/vsitar/',\n                'port': 990,\n                'pattern': {'dem': '*DEM.tif',\n                            'edm': '*EDM.tif',\n                            'flm': '*FLM.tif',\n                            'hem': '*HEM.tif',\n                            'wbm': '*WBM.tif'},\n                'datatype': {'dem': 'Float32',\n                             'edm': 'Byte',\n                             'flm': 'Byte',\n                             'hem': 'Float32',\n           
                  'wbm': 'Byte'},\n                'authentication': True\n            },\n            'GETASSE30': {'url': 'https://step.esa.int/auxdata/dem/GETASSE30',\n                          'nodata': {'dem': None},\n                          'resolution': {'0-90': (15 / 1800, 15 / 1800)},\n                          'tilesize': 15,\n                          'area_or_point': 'area',\n                          'vsi': '/vsizip/',\n                          'pattern': {'dem': '*.GETASSE30'},\n                          'datatype': {'dem': 'Int16'},\n                          'authentication': False\n                          },\n            'SRTM 1Sec HGT': {'url': 'https://step.esa.int/auxdata/dem/SRTMGL1',\n                              'nodata': {'dem': -32768.0},\n                              'resolution': {'0-90': (1 / 3600, 1 / 3600)},\n                              'tilesize': 1,\n                              'area_or_point': 'point',\n                              'vsi': '/vsizip/',\n                              'pattern': {'dem': '*.hgt'},\n                              'datatype': {'dem': 'Int16'},\n                              'authentication': False\n                              },\n            'SRTM 3Sec': {'url': 'https://step.esa.int/auxdata/dem/SRTM90/tiff',\n                          'nodata': {'dem': -32768.0},\n                          'resolution': {'0-90': (5 / 6000, 5 / 6000)},\n                          'tilesize': 5,\n                          'area_or_point': 'area',\n                          'vsi': '/vsizip/',\n                          'pattern': {'dem': 'srtm*.tif'},\n                          'datatype': {'dem': 'Int16'},\n                          'authentication': False\n                          },\n            # 'TDX90m': {'url': 'ftpes://tandemx-90m.dlr.de',\n            #            'nodata': {'dem': -32767.0,\n            #                       'am2': 0,\n            #                       'amp': 0,\n            #      
                 'com': 0,\n            #                       'cov': 0,\n            #                       'hem': -32767.0,\n            #                       'lsm': 0,\n            #                       'wam': 0},\n            #            'resolution': {'0-50': (1 / 1200, 1 / 1200),\n            #                           '50-60': (1.5 / 1200, 1 / 1200),\n            #                           '60-70': (2 / 1200, 1 / 1200),\n            #                           '70-80': (3 / 1200, 1 / 1200),\n            #                           '80-85': (5 / 1200, 1 / 1200),\n            #                           '85-90': (10 / 1200, 1 / 1200)},\n            #            'tilesize': 1,\n            #            'area_or_point': 'point',\n            #            'vsi': '/vsizip/',\n            #            'pattern': {'dem': '*_DEM.tif',\n            #                        'am2': '*_AM2.tif',\n            #                        'amp': '*_AMP.tif',\n            #                        'com': '*_COM.tif',\n            #                        'cov': '*_COV.tif',\n            #                        'hem': '*_HEM.tif',\n            #                        'lsm': '*_LSM.tif',\n            #                        'wam': '*_WAM.tif'},\n            #            'datatype': {'dem': 'Float32',\n            #                         'am2': 'UInt16',\n            #                         'amp': 'UInt16',\n            #                         'com': 'Byte',\n            #                         'cov': 'Byte',\n            #                         'hem': 'Float32',\n            #                         'lsm': 'Byte',\n            #                         'wam': 'Byte'},\n            #            'authentication': True\n            #            }\n        }\n    \n    def load(self, dem_type, vrt=None, buffer=None, username=None,\n             password=None, product='dem', crop=True, lock_timeout=600,\n             offline=False):\n        \"\"\"\n        
Download DEM tiles. The result is either returned in a list of file\n        names combined into a VRT mosaic. The VRT is cropped to the combined\n        extent of the geometries, but the pixel grid of the source files is\n        preserved and no resampling/shifting is applied.\n        \n        Parameters\n        ----------\n        dem_type: str\n            the type fo DEM to be used\n        vrt: str or None\n            an optional GDAL VRT file created from the obtained DEM tiles\n        buffer: int or float or None\n            a buffer in degrees to add around the individual geometries\n        username: str or None\n            the download account username\n        password: str or None\n            the download account password\n        product: str\n            the sub-product to extract from the DEM product\n             - 'AW3D30'\n             \n              * 'dem': the actual Digital Elevation Model\n              * 'msk': mask information for each pixel (Cloud/Snow Mask, Land water and\n                low correlation mask, Sea mask, Information of elevation dataset used\n                for the void-filling processing)\n              * 'stk': number of DSM-scene files which were used to produce the 5m resolution DSM\n             \n             - 'Copernicus 10m EEA DEM'\n             \n              * 'dem': the actual Digital Elevation Model\n              * 'edm': Editing Mask\n              * 'flm': Filling Mask\n              * 'hem': Height Error Mask\n              * 'wbm': Water Body Mask\n             \n             - 'Copernicus 30m Global DEM'\n             \n              * 'dem': the actual Digital Elevation Model\n              * 'edm': Editing Mask\n              * 'flm': Filling Mask\n              * 'hem': Height Error Mask\n              * 'wbm': Water Body Mask\n             \n             - 'Copernicus 30m Global DEM II'\n             \n              * 'dem': the actual Digital Elevation Model\n              * 'edm': 
Editing Mask\n              * 'flm': Filling Mask\n              * 'hem': Height Error Mask\n              * 'wbm': Water Body Mask\n             \n             - 'Copernicus 90m Global DEM'\n             \n              * 'dem': the actual Digital Elevation Model\n              * 'edm': Editing Mask\n              * 'flm': Filling Mask\n              * 'hem': Height Error Mask\n              * 'wbm': Water Body Mask\n             \n             - 'Copernicus 90m Global DEM II'\n             \n              * 'dem': the actual Digital Elevation Model\n              * 'edm': Editing Mask\n              * 'flm': Filling Mask\n              * 'hem': Height Error Mask\n              * 'wbm': Water Body Mask\n             \n             - 'GETASSE30'\n             \n              * 'dem': the actual Digital Elevation Model\n             \n             - 'SRTM 1Sec HGT'\n             \n              * 'dem': the actual Digital Elevation Model\n             \n             - 'SRTM 3Sec'\n             \n              * 'dem': the actual Digital Elevation Model\n             \n             - 'TDX90m'\n             \n              * 'dem': the actual Digital Elevation Model\n              * 'am2': Amplitude Mosaic representing the minimum value\n              * 'amp': Amplitude Mosaic representing the mean value\n              * 'com': Consistency Mask\n              * 'cov': Coverage Map\n              * 'hem': Height Error Map\n              * 'lsm': Layover and Shadow Mask, based on SRTM C-band and Globe DEM data\n              * 'wam': Water Indication Mask\n        crop: bool\n            If a VRT is created, crop it to the spatial extent of the provided geometries\n            or return the full extent of the DEM tiles? 
In the latter case, the common\n            bounding box of the geometries is expanded so that the coordinates are\n            multiples of the tile size of the respective DEM option.\n        lock_timeout: int\n            how long to wait to acquire a lock on the downloaded files?\n        offline: bool\n            work offline? If `True`, only locally existing files are considered\n            and no online check is performed. If a file is missing, an error is\n            raised. For this to work, the function needs to be run in `online`\n            mode once to create a local index.\n        \n        Returns\n        -------\n        list[str] or None\n            the names of the obtained files or None if a VRT file was defined\n        \"\"\"\n        keys = self.config.keys()\n        if dem_type not in keys:\n            options = ', '.join(keys)\n            raise RuntimeError(f\"DEM type '{dem_type}' is not supported.\\n  \"\n                               f\"possible options: '{options}'\")\n        \n        products = self.config[dem_type]['pattern'].keys()\n        if product not in products:\n            options = ', '.join(products)\n            raise RuntimeError(f\"Product '{product}' is not available \"\n                               f\"for DEM type '{dem_type}'.\\n\"\n                               f\"  options: '{options}'\")\n        \n        outdir = os.path.join(self.auxdatapath, 'dem', dem_type)\n        \n        if self.geometries is not None:\n            candidates = []\n            for geo in self.geometries:\n                corners = self.__applybuffer(extent=geo.extent, buffer=buffer)\n                candidates.extend(self.remote_ids(extent=corners, dem_type=dem_type,\n                                                  username=username, password=password,\n                                                  product=product))\n        else:\n            candidates = self.remote_ids(extent=None, dem_type=dem_type,\n              
                           username=username, password=password,\n                                         product=product)\n        \n        if self.config[dem_type]['url'].startswith('ftp'):\n            port = 0\n            if 'port' in self.config[dem_type].keys():\n                port = self.config[dem_type]['port']\n            locals = self.__retrieve_ftp(url=self.config[dem_type]['url'],\n                                         filenames=candidates,\n                                         outdir=outdir, username=username,\n                                         password=password, port=port,\n                                         lock_timeout=lock_timeout,\n                                         offline=offline)\n        else:\n            locals = self.__retrieve(urls=candidates, outdir=outdir,\n                                     lock_timeout=lock_timeout,\n                                     offline=offline)\n        \n        resolution = None\n        datatype = None\n        src_nodata = None\n        dst_nodata = None\n        tap = False\n        extent = self.__commonextent(buffer=buffer)\n        aop = self.config[dem_type]['area_or_point']\n        res = self.__get_resolution(dem_type=dem_type, y=extent['ymin'])\n        if not crop:\n            f = self.config[dem_type]['tilesize']\n            extent['xmin'] = floor(extent['xmin'] / f) * f\n            extent['ymin'] = floor(extent['ymin'] / f) * f\n            extent['xmax'] = ceil(extent['xmax'] / f) * f\n            extent['ymax'] = ceil(extent['ymax'] / f) * f\n        if aop == 'point':\n            shift_x = res[0] / 2\n            shift_y = res[1] / 2\n            extent['xmin'] -= shift_x\n            extent['ymin'] += shift_y\n            extent['xmax'] -= shift_x\n            extent['ymax'] += shift_y\n        \n        # special case where no DEM tiles were found because the AOI is completely over ocean\n        if len(locals) == 0 and vrt is not None:\n            # 
define a dummy file as source file\n            # his file contains one pixel with a value of 0\n            # nodata value is 255\n            tif = vrt.replace('.vrt', '_tmp.tif')\n            self.__create_dummy_dem(filename=tif, extent=extent)\n            locals = [tif]\n            datatype = self.config[dem_type]['datatype'][product]\n            src_nodata = 0  # define the data value as nodata, so it can be overwritten in the VRT\n            if product == 'dem':\n                dst_nodata = 0\n            else:\n                dst_nodata = self.config[dem_type]['nodata'][product]\n            # determine the target resolution based on minimum latitude\n            resolution = self.__get_resolution(dem_type=dem_type, y=extent['ymin'])\n        \n        # make sure all GETASSE30 tiles get an ENVI HDR file so that they are GDAL-readable\n        if dem_type == 'GETASSE30':\n            for item in locals:\n                getasse30_hdr(item)\n        \n        if vrt is not None:\n            if src_nodata is None:\n                src_nodata = self.config[dem_type]['nodata'][product]\n            if dst_nodata is None:\n                dst_nodata = 0 if product == 'dem' else None\n            \n            self.__buildvrt(tiles=locals, vrtfile=vrt,\n                            pattern=self.config[dem_type]['pattern'][product],\n                            vsi=self.config[dem_type]['vsi'],\n                            extent=extent,\n                            src_nodata=src_nodata, dst_nodata=dst_nodata,\n                            hide_nodata=True,\n                            resolution=resolution,\n                            tap=tap, dst_datatype=datatype)\n        else:\n            return locals\n    \n    def remote_ids(self, extent, dem_type, product='dem', username=None, password=None):\n        \"\"\"\n        parse the names/URLs of the remote files overlapping with an area of interest\n\n        Parameters\n        ----------\n        
extent: dict or None\n            the extent of the area of interest with keys xmin, xmax, ymin, ymax\n            or `None` to not set any spatial filter.\n        dem_type: str\n            the type fo DEM to be used\n        product: str\n            the sub-product to extract from the DEM product. Only needed for DEM options 'Copernicus 30m Global DEM'\n            and 'Copernicus 90m Global DEM' and ignored otherwise.\n        username: str or None\n            the download account username\n        password: str or None\n            the download account password\n\n        Returns\n        -------\n        str\n            the sorted names of the remote files\n        \"\"\"\n        keys = self.config.keys()\n        if dem_type not in keys:\n            raise RuntimeError(\"demType '{}' is not supported\\n  \"\n                               \"possible options: '{}'\"\n                               .format(dem_type, \"', '\".join(keys)))\n        \n        def ids(\n                x: int | None = None,\n                y: int | None = None,\n                nx: int = 3,\n                ny: int = 3,\n                reverse: bool = False\n        ) -> tuple[str, str]:\n            if reverse:\n                pattern = '{c:0{n}d}{id}'\n            else:\n                pattern = '{id}{c:0{n}d}'\n            if x is not None:\n                xf = pattern.format(id='W' if x < 0 else 'E', c=abs(x), n=nx)\n            else:\n                xf = ''\n            if y is not None:\n                yf = pattern.format(id='S' if y < 0 else 'N', c=abs(y), n=ny)\n            else:\n                yf = ''\n            return yf, xf\n        \n        def remotes_from_index(\n                indices: list[tuple[str, str]],\n                product: str | None\n        ) -> list[str]:\n            lookup = self.__local_index(dem_type=dem_type)\n            remotes = []\n            for y, x in indices:\n                try:\n                    if product is 
None:\n                        remotes.append(lookup[y][x])\n                    else:\n                        remotes.append(lookup[y][x][product])\n                except KeyError:\n                    pass\n            return remotes\n        \n        if dem_type in ['Copernicus 30m Global DEM',\n                        'Copernicus 90m Global DEM',\n                        'SRTM 1Sec HGT']:\n            lat, lon = self.intrange(extent, step=1)\n            indices = [ids(x, y, nx=3, ny=2)\n                       for x in lon for y in lat]\n            remotes = remotes_from_index(indices, product=product)\n        \n        elif dem_type == 'GETASSE30':\n            lat, lon = self.intrange(extent, step=15)\n            indices = [ids(x, y, nx=3, ny=2, reverse=True)\n                       for x in lon for y in lat]\n            remotes = remotes_from_index(indices, product=product)\n        \n        elif dem_type == 'TDX90m':\n            lat, lon = self.intrange(extent, step=1)\n            remotes = []\n            for x in lon:\n                xr = abs(x) // 10 * 10\n                for y in lat:\n                    yf, xf = ids(x=x, y=y, nx=3, ny=2)\n                    remotes.append('DEM/{y}/{hem}{xr:03d}/TDM1_DEM__30_{y}{x}.zip'\n                                   .format(x=xf, xr=xr, y=yf, hem=xf[0]))\n        \n        elif dem_type == 'AW3D30':\n            remotes = []\n            lat, lon = self.intrange(extent, step=1)\n            for x in lon:\n                for y in lat:\n                    remotes.append(\n                        '{0}{1}/{2}{3}.tar.gz'.format(*ids(x // 5 * 5, y // 5 * 5),\n                                                      *ids(x, y)))\n        \n        elif dem_type == 'SRTM 3Sec':\n            lat = range(\n                floor((60 - float(extent['ymax'])) / 5) + 1,\n                ceil((60 - float(extent['ymin'])) / 5) + 1\n            )\n            lon = range(\n                floor((float(extent['xmin']) + 
180) / 5) + 1,\n                ceil((float(extent['xmax']) + 180) / 5) + 1\n            )\n            indices = [(f'{y:02d}', f'{x:02d}') for x in lon for y in lat]\n            remotes = remotes_from_index(indices, product=product)\n        \n        elif dem_type in ['Copernicus 10m EEA DEM',\n                          'Copernicus 30m Global DEM II',\n                          'Copernicus 90m Global DEM II']:\n            lat, lon = self.intrange(extent, step=1)\n            indices = [''.join(ids(x, y, nx=3, ny=2))\n                       for x in lon for y in lat]\n            \n            outdir = os.path.join(self.auxdatapath, 'dem', dem_type)\n            mapping = os.path.join(outdir, 'mapping.csv')\n            mapping2 = os.path.join(outdir, 'mapping_append.csv')\n            \n            def ftp_search(ftp, target):\n                out = []\n                if target.endswith('/'):\n                    print(target)\n                    content = ftp.nlst(target)\n                    for item in content:\n                        out.extend(ftp_search(ftp, target + item))\n                else:\n                    if target.endswith('DEM.tar'):\n                        out.append(target.encode('latin-1').decode('utf-8'))\n                return out\n            \n            def ftp_connect(host, path, username, password, port=990):\n                ftp = ImplicitFTP_TLS()\n                ftp.connect(host=host, port=port)\n                ftp.login(username, password)\n                ftp.cwd(path)\n                return ftp\n            \n            if not os.path.isfile(mapping2):\n                parsed = urlparse(self.config[dem_type]['url'])\n                host = parsed.netloc\n                path = parsed.path\n                ftp = None\n                os.makedirs(outdir, exist_ok=True)\n                if not os.path.isfile(mapping):\n                    print('downloading mapping.csv')\n                    ftp = ftp_connect(host, 
path, username, password,\n                                      port=self.config[dem_type]['port'])\n                    with open(mapping, 'wb') as myfile:\n                        ftp.retrbinary('RETR mapping.csv', myfile.write)\n                print('searching FTP server')\n                if ftp is None:\n                    ftp = ftp_connect(host, path, username, password,\n                                      port=self.config[dem_type]['port'])\n                files = ftp_search(ftp, path + '/')\n                files_base = [os.path.basename(x) for x in files]\n                if ftp is not None:\n                    ftp.quit()\n                print('matching found files with mapping.csv')\n                with open(mapping) as obj:\n                    reader = csv.reader(obj, delimiter=';')\n                    with open(mapping2, 'w', newline='') as out:\n                        writer = csv.writer(out, delimiter=';')\n                        writer.writerow(next(reader))  # write header\n                        for row in reader:\n                            index = files_base.index(row[0])\n                            row.append(files[index])\n                            del files_base[index]\n                            del files[index]\n                            writer.writerow(row)\n            remotes = []\n            with open(mapping2) as obj:\n                stream = csv.reader(obj, delimiter=';')\n                for row in stream:\n                    if row[1] + row[2] in indices:\n                        remotes.append(row[-1])\n        else:\n            raise ValueError('unknown demType: {}'.format(dem_type))\n        \n        return sorted(remotes)\n\n\ndef getasse30_hdr(fname):\n    \"\"\"\n    create an ENVI HDR file for zipped GETASSE30 DEM tiles\n    \n    Parameters\n    ----------\n    fname: str\n        the name of the zipped tile\n\n    Returns\n    -------\n\n    \"\"\"\n    basename = os.path.basename(fname)\n    
pattern = r'(?P<lat>[0-9]{2})' \\\n              '(?P<ns>[A-Z])' \\\n              '(?P<lon>[0-9]{3})' \\\n              '(?P<ew>[A-Z]).zip'\n    match = re.search(pattern, basename).groupdict()\n    \n    lon = float(match['lon'])\n    if match['ew'] == 'W':\n        lon *= -1\n    lat = float(match['lat'])\n    if match['ns'] == 'S':\n        lat *= -1\n    posting = 30 / 3600  # 30 arc seconds\n    pixels = 1800\n    \n    map_info = ['Geographic Lat/Lon', '1.0000', '1.0000',\n                str(lon),\n                str(lat + pixels * posting),\n                str(posting),\n                str(posting),\n                'WGS-84', 'units=Degrees']\n    \n    with zf.ZipFile(fname, 'a') as zip:\n        files = zip.namelist()\n        hdr = basename.replace('.zip', '.hdr')\n        if hdr not in files:\n            with HDRobject() as obj:\n                obj.samples = pixels\n                obj.lines = pixels\n                obj.byte_order = 1\n                obj.data_type = 2\n                obj.map_info = '{{{}}}'.format(','.join(map_info))\n                obj.coordinate_system_string = crsConvert(4326, 'wkt')\n                zip.writestr(hdr, str(obj))\n\n\ndef get_dem_options(require_auth=None):\n    \"\"\"\n    Get the names of all supported DEM type options.\n    \n    Parameters\n    ----------\n    require_auth: bool or None\n        only return options that do/don't require authentication. 
Default None: return all options.\n\n    Returns\n    -------\n    list[str]\n        the names of the DEM options\n    \"\"\"\n    out = []\n    # create a dummy vector geometry for initializing the DEMHandler\n    ext = {'xmin': -44, 'xmax': -43, 'ymin': 30, 'ymax': 31}\n    with bbox(coordinates=ext, crs=4326) as vec:\n        with DEMHandler(geometries=[vec]) as handler:\n            for key, properties in handler.config.items():\n                if require_auth is None:\n                    out.append(key)\n                else:\n                    if require_auth == properties['authentication']:\n                        out.append(key)\n            return sorted(out)\n\n\ndef get_egm_lookup(geoid, software):\n    \"\"\"\n    Download lookup tables for converting EGM geoid heights to WGS84 ellipsoid heights.\n    \n    Parameters\n    ----------\n    geoid: str\n        the geoid model; current options:\n        \n        - SNAP: 'EGM96'\n        - PROJ: 'EGM96', 'EGM2008'\n    software: str\n        the software for which to download the EGM lookup\n        \n        - SNAP: default directory: ``~/.snap/auxdata/dem/egm96``; URL:\n        \n          * https://step.esa.int/auxdata/dem/egm96/ww15mgh_b.zip\n        - PROJ: requires ``PROJ_DATA`` or ``PROJ_LIB`` environment variable to be set as download directory; URLs:\n        \n          * https://cdn.proj.org/us_nga_egm96_15.tif\n          * https://cdn.proj.org/us_nga_egm08_25.tif\n\n    Returns\n    -------\n\n    \"\"\"\n    if software == 'SNAP':\n        try:\n            auxdatapath = ExamineSnap().auxdatapath\n        except AttributeError:\n            auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')\n        local = os.path.join(auxdatapath, 'dem', 'egm96', 'ww15mgh_b.zip')\n        os.makedirs(os.path.dirname(local), exist_ok=True)\n        if not os.path.isfile(local):\n            remote = 'https://step.esa.int/auxdata/dem/egm96/ww15mgh_b.zip'\n            log.info('{} 
<<-- {}'.format(local, remote))\n            r = requests.get(remote)\n            r.raise_for_status()\n            with open(local, 'wb') as out:\n                out.write(r.content)\n            r.close()\n    \n    elif software == 'PROJ':\n        lookup = {'EGM96': 'us_nga_egm96_15.tif',\n                  'EGM2008': 'us_nga_egm08_25.tif'}\n        remote = 'https://cdn.proj.org/' + lookup[geoid]\n        \n        # starting with PROJ 9.1, the PROJ_DATA variable is used.\n        # Earlier versions make use of PROJ_LIB.\n        var = 'PROJ_DATA'\n        proj_dir = os.environ.get(var)\n        if proj_dir is None:\n            var = 'PROJ_LIB'\n            proj_dir = os.environ.get(var)\n        if proj_dir is not None:\n            local = os.path.join(proj_dir, os.path.basename(remote))\n            if not os.path.isfile(local):\n                if not os.access(proj_dir, os.W_OK):\n                    raise OSError(\"cannot write to '{0}' path: {1}\".format(var, proj_dir))\n                log.info('{} <<-- {}'.format(local, remote))\n                r = requests.get(remote)\n                r.raise_for_status()\n                with open(local, 'wb') as out:\n                    out.write(r.content)\n                r.close()\n        else:\n            raise RuntimeError(\"Neither environment variable 'PROJ_DATA' nor 'PROJ_LIB' are set\")\n    else:\n        raise TypeError(\"software must be either 'SNAP' or 'PROJ'\")\n\n\nclass ImplicitFTP_TLS(ftplib.FTP_TLS):\n    \"\"\"\n    FTP_TLS subclass that automatically wraps sockets in SSL to support implicit FTPS.\n    taken from https://stackoverflow.com/a/36049814\n    \"\"\"\n    \n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n        self._sock = None\n    \n    @property\n    def sock(self):\n        \"\"\"Return the socket.\"\"\"\n        return self._sock\n    \n    @sock.setter\n    def sock(self, value):\n        \"\"\"When modifying the socket, ensure that 
it is ssl wrapped.\"\"\"\n        if value is not None and not isinstance(value, ssl.SSLSocket):\n            value = self.context.wrap_socket(value)\n        self._sock = value\n\n\ndef vrt_check_sources(fname):\n    \"\"\"\n    check the sanity of all source files of a given VRT.\n    Currently does not check in-memory VRTs.\n    \n    Parameters\n    ----------\n    fname: str\n        the VRT file name\n\n    Returns\n    -------\n    \n    Raises\n    ------\n    RuntimeError\n    \"\"\"\n    if os.path.isfile(fname):\n        tree = etree.parse(fname)\n        sources = [x.text for x in tree.findall('.//SourceFilename')]\n        for source in sources:\n            if not os.path.isabs(source):\n                base_dir = os.path.dirname(fname)\n                source = os.path.normpath(os.path.join(base_dir, source))\n            if not os.path.isfile(source):\n                raise RuntimeError(f'missing VRT source file: {source}')\n"
  },
  {
    "path": "pyroSAR/config.py",
    "content": "# -*- coding: utf-8 -*-\n###############################################################################\n# pyroSAR configuration handling\n\n# Copyright (c) 2018-2024, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nimport os\nimport json\n\nimport configparser as ConfigParser\n\n__LOCAL__ = ['acquisition_mode', 'coordinates', 'cycleNumber', 'frameNumber',\n             'lines', 'orbit', 'orbitNumber_abs', 'orbitNumber_rel',\n             'polarizations', 'product', 'projection', 'samples',\n             'sensor', 'spacing', 'start', 'stop']\n\n\nclass Singleton(type):\n    \"\"\"\n    Define an Instance operation that lets clients access its unique instance.\n    https://sourcemaking.com/design_patterns/singleton/python/1\n    \"\"\"\n    \n    def __init__(cls, name, bases, attrs, **kwargs):\n        super().__init__(name, bases, attrs)\n        cls._instance = None\n    \n    def __call__(cls, *args, **kwargs):\n        if cls._instance is None:\n            cls._instance = super().__call__(*args, **kwargs)\n        return cls._instance\n\n\nclass ConfigHandler(metaclass=Singleton):\n    \"\"\"\n    ConfigHandler is a configuration handler for pyroSAR. 
It is intended to be called by a class's '__init__' and\n    set or get the configuration parameters throughout an entire package.\n    The primary goal with ConfigHandler is to load a single, consistent configuration environment to be passed \n    amongst ALL objects within a package.\n        \n    ConfigHandler is a SINGLETON, meaning once instantiated, THE SAME OBJECT\n    will be returned to every class object calling it.\n\n    Parameters\n    ----------\n    path : str or None\n        A path where the .pyrosar directory will be created. If None (default) it will be created in the user home\n        directory.\n    config_fname : str\n        Name of the config file. Default is 'config.ini'.\n    \n    Methods\n    -------\n    make_dir : Create a .pyrosar directory in home directory.\n    create_config : Create a config.ini file in .pyrosar directory.\n    open : Open the config.ini file.\n    add_section : Create a new section in the configuration.\n    set : Set an option in the configuration.\n    remove_option : Remove an option in the configuration.\n\n    Notes\n    -----\n    The syntax is the same as in ConfigParser. 
Here, keys are called options.\n\n    \"\"\"\n    \n    # Define __setter to control changeable keys (optional)\n    # __setter = [\"etc\", \"auxdata\"]\n    \n    def __init__(self):\n        path = os.path.join(os.path.expanduser('~'), '.pyrosar')\n        \n        self.__GLOBAL = {\n            'path': path,\n            'config_fname': 'config.ini',\n            'config': os.path.join(path, 'config.ini'),\n        }\n        \n        if not os.path.isfile(self.__GLOBAL['config']):\n            self.__create_config()\n        \n        self.parser = ConfigParser.RawConfigParser(allow_no_value=True)\n        self.parser.optionxform = str\n        self.parser.read(self.__GLOBAL['config'])\n    \n    def __create_config(self):\n        \"\"\"\n        Create a config.ini file in .pyrosar directory.\n\n        Returns\n        -------\n        None\n        \"\"\"\n        \n        if not os.path.exists(self.__GLOBAL['path']):\n            os.makedirs(self.__GLOBAL['path'])\n        \n        with open(self.__GLOBAL['config'], 'w'):\n            pass\n    \n    def __str__(self):\n        items = []\n        for section in self.parser.sections():\n            items.append('  Section: {0}\\n'.format(section))\n            \n            for options in self.parser.options(section):\n                items.append('    x {0} :: {1} :: {2}\\n'\n                             .format(options,\n                                     self.parser.get(section, options),\n                                     str(type(options))))\n        out = f'Class    : {self.__class__.__name__}\\n' \\\n              f'Path     : {self.__GLOBAL[\"config\"]}\\n' \\\n              f'Sections : {len(self.parser.sections())}\\n' \\\n              f'Contents : \\n{\"\".join(items)}'\n        \n        return out\n    \n    def __getitem__(self, section):\n        if not self.parser.has_section(section):\n            raise AttributeError('Section {0} does not exist.'.format(str(section)))\n        
return dict(self.parser.items(section))\n    \n    @property\n    def sections(self):\n        return self.parser.sections()\n    \n    def keys(self, section):\n        \"\"\"\n        Get all keys (options) of a section.\n\n        Parameters\n        ----------\n        section : str\n            Section name.\n\n        Returns\n        -------\n        list : options (keys) of a section.\n\n        \"\"\"\n        return self.parser.options(section)\n    \n    def open(self):\n        \"\"\"\n        Open the config.ini file. This method will open the config.ini\n        file in an external standard app (text editor).\n\n        Returns\n        -------\n        os.startfile\n\n        \"\"\"\n        \n        os.startfile(self.__GLOBAL['config'])\n    \n    def add_section(self, section):\n        \"\"\"\n        Create a new section in the configuration.\n\n        Parameters\n        ----------\n        section : str\n            Section name\n\n        Returns\n        -------\n        None\n\n        \"\"\"\n        if not self.parser.has_section(section):\n            self.parser.add_section(section)\n            self.write()\n        else:\n            raise RuntimeError('section already exists')\n    \n    @property\n    def file(self):\n        return self.__GLOBAL['config']\n    \n    def set(self, section, key, value, overwrite=False):\n        \"\"\"\n        Set an option.\n\n        Parameters\n        ----------\n        section : str\n            Section name.\n        key : str\n            the attribute name\n        value :\n            the attribute value\n        overwrite : bool\n            If True and the defined key exists the value will be overwritten.\n\n        Returns\n        -------\n\n        \"\"\"\n        if not self.parser.has_section(section):\n            raise AttributeError('Section {0} does not exist.'.format(str(section)))\n        \n        if isinstance(value, list):\n            value = json.dumps(value)\n        
\n        if key in self.parser.options(section) and not overwrite:\n            raise RuntimeError('Value already exists.')\n        \n        self.parser.set(section, key, value)\n        self.write()\n    \n    def remove_option(self, section, key):\n        \"\"\"\n        Remove an option and key.\n\n        Parameters\n        ----------\n        section : str\n            Section name.\n        key : str\n            Key value.\n        \n        Returns\n        -------\n        \n        \"\"\"\n        if not self.parser.has_section(section):\n            raise AttributeError('Section {0} does not exist.'.format(str(section)))\n        \n        if key not in self.parser.options(section):\n            raise AttributeError('Key {0} does not exist.'.format(str(key)))\n        \n        self.parser.remove_option(section, key)\n        self.write()\n    \n    def remove_section(self, section):\n        \"\"\"\n        remove a section\n        \n        Parameters\n        ----------\n        section: str\n            Section name.\n\n        Returns\n        -------\n\n        \"\"\"\n        self.parser.remove_section(section)\n        self.write()\n    \n    def write(self):\n        with open(self.__GLOBAL['config'], 'w', encoding='utf8') as out:\n            self.parser.write(out)\n"
  },
  {
    "path": "pyroSAR/datacube_util.py",
    "content": "###############################################################################\n# Convenience tools for Open Data Cube ingestion\n\n# Copyright (c) 2018-2019, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\"\"\"\nThis (still experimental) module is intended to easily prepare SAR scenes processed\nby pyroSAR for ingestion into an Open Data Cube.\n\n.. code-block:: python\n\n    from pyroSAR.datacube_util import Product, Dataset\n    from pyroSAR.ancillary import find_datasets\n    \n    # find pyroSAR files by metadata attributes\n    archive_s1 = '/.../sentinel1/GRD/processed'\n    scenes_s1 = find_datasets(archive_s1, sensor=('S1A', 'S1B'), acquisition_mode='IW')\n    \n    # group the found files by their file basenames\n    # files with the same basename are considered to belong to the same dataset\n    grouped = groupby(scenes_s1, 'outname_base')\n    \n    # define the polarization units describing the data sets\n    units = {'VV': 'backscatter VV', 'VH': 'backscatter VH'}\n    \n    # create a new product\n    with Product(name='S1_GRD_index',\n                 product_type='gamma0',\n                 description='Gamma Naught RTC backscatter') as prod:\n        \n        for dataset in grouped:\n            with Dataset(dataset, units=units) as ds:\n                \n                # add the dataset to the product\n                prod.add(ds)\n                \n                # parse datacube indexing YMLs from product and data set metadata\n                
prod.export_indexing_yml(ds, 'yml_index_outdir')\n        \n        # write the product YML\n        prod.write('yml_product')\n        \n        # print the product metadata which is written to the product YML\n        print(prod)\n\"\"\"\n\nimport os\nimport re\nimport yaml\nimport uuid\nfrom time import strftime, strptime\nfrom spatialist.raster import Raster, Dtype\nfrom spatialist.ancillary import union\nfrom .ancillary import parse_datasetname\n\nimport logging\nlog = logging.getLogger(__name__)\n\n\nclass Dataset(object):\n    \"\"\"\n    A general class describing dataset information required for creating ODC YML files\n    \n    Parameters\n    ----------\n    filename: str, list, Dataset\n        the product to be used; either an existing :class:`Dataset` object or a (list of) file(s) matching the pyroSAR\n        naming pattern, i.e. that can be parsed by :func:`pyroSAR.ancillary.parse_datasetname`\n    units: str or dict\n        the units of the product measurement\n    \"\"\"\n    def __init__(self, filename, units='DN'):\n        \n        if isinstance(filename, list):\n            combined = sum([Dataset(x, units) for x in filename])\n            self.__init__(combined)\n        \n        elif isinstance(filename, Dataset):\n            for attr, value in vars(filename).items():\n                setattr(self, attr, value)\n        \n        elif isinstance(filename, str):\n            # map pyroSAR sensor identifiers to platform and instrument codes\n            sensor_lookup = {'ASAR': ('ENVISAT', 'ASAR'),\n                             'ERS1': ('ERS-1', 'SAR'),\n                             'ERS2': ('ERS-2', 'SAR'),\n                             'PSR1': ('ALOS-1', 'PALSAR'),\n                             'PSR2': ('ALOS-2', 'PALSAR-2'),\n                             'S1A': ('SENTINEL-1', 'C-SAR'),\n                             'S1B': ('SENTINEL-1', 'C-SAR'),\n                             'S1C': ('SENTINEL-1', 'C-SAR'),\n                             
'S1D': ('SENTINEL-1', 'C-SAR'),\n                             'TSX1': ('TERRASAR-X_1', 'SAR'),\n                             'TDX1': ('TANDEM-X_1', 'SAR')}\n            \n            # extract basic metadata attributes from the filename and register them to the object\n            meta = parse_datasetname(filename)\n            \n            if meta is None:\n                raise ValueError('could not identify dataset: {}'.format(filename))\n            \n            for key, val in meta.items():\n                setattr(self, key, val)\n            \n            # define acquisition start and end time; Currently both are set to the acquisition start time,\n            # which is contained in the filename\n            # Time will only be correct if the full scene was processed, start and end time of a subset will\n            # differ. Thus, accurately setting both is not seen as too relevant.\n            self.from_dt = strftime('%Y-%m-%dT%H:%M:%S', strptime(self.start, '%Y%m%dT%H%M%S'))\n            self.to_dt = strftime('%Y-%m-%dT%H:%M:%S', strptime(self.start, '%Y%m%dT%H%M%S'))\n            \n            # match the sensor ID from the filename to a platform and instrument\n            if self.sensor not in sensor_lookup.keys():\n                raise ValueError('unknown sensor: {}'.format(self.sensor))\n            \n            self.platform, self.instrument = sensor_lookup[self.sensor]\n            \n            # extract general geo metadata from the GTiff information\n            with Raster(filename) as ras:\n                self.dtype = Dtype(ras.dtype).numpystr\n                self.nodata = ras.nodata\n                self.format = ras.format\n                self.xres, self.yres = ras.res\n                self.crs = 'EPSG:{}'.format(ras.epsg)\n                self.is_projected = ras.projcs is not None\n                self.extent = self.__extent_convert(ras.geo, 'x', 'y')\n                # reproject the raster bounding box to EPSG 4326 and store its 
extent\n                with ras.bbox() as bbox:\n                    bbox.reproject(4326)\n                    self.extent_4326 = self.__extent_convert(bbox.extent, 'lon', 'lat')\n            \n            # create dictionary for resolution metadata depending on CRS characteristics\n            resolution_keys = ('x', 'y') if self.is_projected else ('longitude', 'latitude')\n            self.resolution = dict(zip(resolution_keys, (self.xres, self.yres)))\n            \n            # check whether the data type is supported\n            pattern = '(?:(?:u|)int(?:8|16|32|64)|float(?:32|64))'\n            if not re.search(pattern, self.dtype):\n                raise ValueError('unsupported data type {}'.format(self.dtype))\n            \n            # determine the dataset units\n            if isinstance(units, str):\n                units = units\n            elif isinstance(units, dict):\n                try:\n                    units = units[self.polarization]\n                except KeyError:\n                    raise KeyError(\"parameter 'units' does not contain key '{}'\".format(self.polarization))\n            else:\n                raise TypeError(\"parameter 'units' must be of type str or dict\")\n            \n            # create the measurement entry from collected metadata;\n            # this is intended for easy access by class Product\n            self.measurements = {self.polarization: {'dtype': self.dtype,\n                                                     'name': self.polarization,\n                                                     'nodata': self.nodata,\n                                                     'filename': filename,\n                                                     'units': units}}\n        else:\n            raise TypeError('filename must be of type str, list or Dataset')\n    \n    def __add__(self, dataset):\n        \"\"\"\n        override the + operator. 
This is intended to easily combine two Dataset objects, which were\n        created from different files belonging to the same measurement, e.g. two GeoTIFFs with one polarization\n        each.\n        \n        Parameters\n        ----------\n        dataset: Dataset\n            the dataset to add to the current one\n\n        Returns\n        -------\n        Dataset\n            the combination of the two\n        \"\"\"\n        for attr in ['extent', 'crs', 'sensor', 'acquisition_mode', 'proc_steps', 'outname_base']:\n            if getattr(self, attr) != getattr(dataset, attr):\n                raise ValueError('value mismatch: {}'.format(attr))\n        # self.filename.append(dataset.filename)\n        for key in dataset.measurements.keys():\n            if key in self.measurements.keys():\n                raise RuntimeError('only different measurements can be combined to one dataset')\n        self.measurements.update(dataset.measurements)\n        return self\n    \n    def __radd__(self, dataset):\n        \"\"\"\n        similar to :meth:`Dataset.__add__` but for function :func:`sum`, e.g. 
:code:`sum([Dataset1, Dataset2])`\n        \n        Parameters\n        ----------\n        dataset: Dataset\n            the dataset to add to the current one\n\n        Returns\n        -------\n        Dataset\n            the combination of the two\n        \"\"\"\n        if dataset == 0:\n            return self\n        else:\n            return self.__add__(dataset)\n    \n    @staticmethod\n    def __extent_convert(extent, xkey, ykey):\n        \"\"\"\n        convert the extent of a :class:`~spatialist.raster.Raster` object to a\n        datacube-compliant dictionary.\n        \n        Parameters\n        ----------\n        extent: dict\n            the extent as returned by a :class:`~spatialist.raster.Raster` object\n        xkey: {'longitude', 'x'}\n            the key of the x dimension\n        ykey: {'latitude', 'y'}\n            the key of the y dimension\n\n        Returns\n        -------\n        dict\n            a dictionary with keys `ll`, `lr`, `ul` and `ur`\n        \"\"\"\n        return {'ll': {xkey: extent['xmin'],\n                       ykey: extent['ymin']},\n                'lr': {xkey: extent['xmax'],\n                       ykey: extent['ymin']},\n                'ul': {xkey: extent['xmin'],\n                       ykey: extent['ymax']},\n                'ur': {xkey: extent['xmax'],\n                       ykey: extent['ymax']}}\n    \n    def __enter__(self):\n        return self\n    \n    def __exit__(self, exc_type, exc_val, exc_tb):\n        self.close()\n    \n    def __get_measurement_attr(self, attr):\n        \"\"\"\n        get a certain measurement attribute from all measurements\n        \n        Parameters\n        ----------\n        attr: str\n            the attribute to get\n\n        Returns\n        -------\n        dict\n            a dictionary with the measurement names as keys and the respective attribute as value\n        \"\"\"\n        return dict([(key, self.measurements[key][attr]) for key in 
self.measurements.keys()])\n    \n    @property\n    def filenames(self):\n        \"\"\"\n        \n        Returns\n        -------\n        dict\n            all file names registered in the dataset\n        \"\"\"\n        return self.__get_measurement_attr('filename')\n    \n    @property\n    def identifier(self):\n        \"\"\"\n        \n        Returns\n        -------\n        str\n            a unique dataset identifier\n        \"\"\"\n        return '{}_{}'.format(self.outname_base, '_'.join(self.proc_steps))\n    \n    @property\n    def units(self):\n        \"\"\"\n        \n        Returns\n        -------\n        dict\n            all measurement unit names registered in the dataset\n        \"\"\"\n        return self.__get_measurement_attr('units')\n    \n    @units.setter\n    def units(self, value):\n        \"\"\"\n        (re)set the units of all measurements\n        \n        Parameters\n        ----------\n        value: str or dict\n            the unit(s) to be set; if multiple measurements are present,\n            a dictionary with measurement names as keys needs to be defined\n\n        Returns\n        -------\n\n        \"\"\"\n        keys = list(self.measurements.keys())\n        if isinstance(value, str):\n            if len(keys) == 1:\n                self.measurements[keys[0]]['units'] = value\n            else:\n                raise TypeError('the dataset contains multiple measurements; '\n                                'in this case a dictionary is needed for setting the measurement units')\n        elif isinstance(value, dict):\n            for name, unit in value.items():\n                if name in keys:\n                    self.measurements[name]['units'] = unit\n                else:\n                    raise KeyError(\"the dataset does not contain a measurement '{}'\".format(name))\n    \n    def close(self):\n        return\n\n\nclass Product(object):\n    \"\"\"\n    A class for describing an ODC product 
definition\n    \n    Parameters\n    ----------\n    definition: str, list, None\n        the source of the product definition; either an existing product YML, a list of :class:`Dataset` objects,\n        or None. In the latter case the product is defined using the parameters\n        `name`, `product_type` and `description`.\n    name: str\n        the name of the product in the data cube\n    product_type: str\n        the type of measurement defined in the product, e.g. `gamma0`\n    description: str\n        the description of the product and its measurements\n    \"\"\"\n    def __init__(self, definition=None, name=None, product_type=None,\n                 description=None):\n        \n        missing_message = \"when initializing {}, parameters \" \\\n                          \"'name', 'product_type' and 'description' must be defined\"\n        \n        if isinstance(definition, str):\n            if os.path.isfile(definition):\n                with open(definition, 'r') as yml:\n                    try:\n                        self.meta = yaml.load(yml, Loader=yaml.FullLoader)\n                    except yaml.YAMLError:\n                        raise RuntimeError('the provided file does not seem to be a YAML file')\n            else:\n                raise RuntimeError('definition file does not exist')\n        \n        elif isinstance(definition, list):\n            if None in [name, product_type, description]:\n                raise ValueError(missing_message.format('a product from list'))\n            self.__initialize(name, product_type, description)\n            for dataset in definition:\n                with Dataset(dataset) as DS:\n                    self.add(DS)\n        \n        elif definition is None:\n            if None in [name, product_type, description]:\n                raise ValueError(missing_message.format('a blank product'))\n            self.__initialize(name, product_type, description)\n        else:\n            raise 
TypeError('type of parameter definition must be either str, list or None')\n    \n    def __enter__(self):\n        return self\n    \n    def __exit__(self, exc_type, exc_val, exc_tb):\n        self.close()\n    \n    def __str__(self):\n        return yaml.dump(self.meta, default_flow_style=False)\n    \n    def __getattr__(self, item):\n        if item in self.__fixture_storage:\n            return self.meta['storage'][item]\n        elif item in self.__fixture_metadata:\n            subkey = 'code' if item == 'platform' else 'name'\n            return self.meta['metadata'][item][subkey]\n        elif item == 'product_type':\n            return self.meta['metadata']['product_type']\n        else:\n            return object.__getattribute__(self, item)\n    \n    def __setattr__(self, key, value):\n        if key in self.__fixture_storage:\n            self.meta['storage'][key] = value\n        elif key in self.__fixture_metadata:\n            subkey = 'code' if key == 'platform' else 'name'\n            self.meta['metadata'][key][subkey] = value\n        elif key == 'product_type':\n            self.meta['metadata']['product_type'] = value\n        else:\n            super(Product, self).__setattr__(key, value)\n    \n    def close(self):\n        return\n    \n    def __add_measurement(self, name, dtype, nodata, units):\n        \"\"\"\n        create a new measurement entry\n        \n        Parameters\n        ----------\n        name: str\n            the measurement name\n        dtype: str\n            the data type, e.g. 
float32\n        nodata: int or float\n            the nodata value of the data\n        units: str\n            the measurement units\n\n        Returns\n        -------\n\n        \"\"\"\n        if name in self.measurements.keys():\n            raise IndexError('measurement {} already exists'.format(name))\n        self.meta['measurements'].append({'name': name,\n                                          'dtype': dtype,\n                                          'units': units,\n                                          'nodata': nodata})\n    \n    def __initialize(self, name, product_type, description):\n        \"\"\"\n        create a new blank product\n        \n        Parameters\n        ----------\n        name: str\n            the name of the product\n        product_type: str\n            the product type, e.g. `gamma0`\n        description: str\n            a description of the product content/purpose\n\n        Returns\n        -------\n\n        \"\"\"\n        self.meta = {'description': description,\n                     'measurements': [],\n                     'metadata': {'platform': {'code': None},\n                                  'instrument': {'name': None},\n                                  'format': {'name': None},\n                                  'product_type': product_type},\n                     'metadata_type': 'eo',\n                     'name': name,\n                     'storage': {'crs': None,\n                                 'resolution': None}}\n    \n    @staticmethod\n    def __check_dict_keys(keys, reference):\n        return len(union(keys, reference)) == len(keys)\n    \n    @property\n    def __fixture_fields(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the names of the top-level metadata fields, which must be defined\n        \"\"\"\n        return ['description', 'measurements', 'metadata', 'metadata_type', 'name', 'storage']\n    \n    @property\n    def 
__fixture_measurement(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the names of the metadata fields, which must be defined for all measurements\n        \"\"\"\n        return ['dtype', 'nodata', 'units']\n    \n    @property\n    def __fixture_metadata(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the names of the metadata fields, which must be defined in the general metadata section\n        \"\"\"\n        return ['format', 'instrument', 'platform']\n    \n    @property\n    def __fixture_storage(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the names of the metadata fields, which must be defined for the storage section\n        \"\"\"\n        return ['crs', 'resolution']\n    \n    def __validate(self):\n        \"\"\"\n        assert whether the Product is valid\n        \n        Returns\n        -------\n        \n        Raises\n        ------\n        RuntimeError\n        \"\"\"\n        try:\n            assert isinstance(self.meta, dict)\n            assert self.__check_dict_keys(self.__fixture_fields, self.meta.keys())\n            assert 'product_type' in self.meta['metadata'].keys()\n            for measurement in self.meta['measurements']:\n                assert self.__check_dict_keys(self.__fixture_measurement, measurement.keys())\n        except AssertionError as e:\n            log.info(e)\n            raise RuntimeError('product invalid')\n    \n    def add(self, dataset):\n        \"\"\"\n        Add a dataset to the abstracted product description. 
This first performs a check\n        whether the dataset is compatible with the product and its already existing measurements.\n        If a measurement in the dataset does not yet exist in the product description it is added.\n        \n        Parameters\n        ----------\n        dataset: Dataset\n            the dataset whose description is to be added\n\n        Returns\n        -------\n\n        \"\"\"\n        if not isinstance(dataset, Dataset):\n            raise TypeError('input must be of type pyroSAR.datacube.Dataset')\n        self.check_integrity(dataset, allow_new_measurements=True)\n        \n        # set the general product definition attributes if they are None\n        for attr in self.__fixture_metadata + self.__fixture_storage:\n            if getattr(self, attr) is None:\n                setattr(self, attr, getattr(dataset, attr))\n        \n        # if it is not yet present, add the dataset measurement definition to that of the product\n        for measurement, content in dataset.measurements.items():\n            if measurement not in self.measurements.keys():\n                self.__add_measurement(dtype=content['dtype'],\n                                       name=content['name'],\n                                       nodata=content['nodata'],\n                                       units=content['units'])\n    \n    def check_integrity(self, dataset, allow_new_measurements=False):\n        \"\"\"\n        check if a dataset is compatible with the product definition.\n        \n        Parameters\n        ----------\n        dataset: Dataset\n            the dataset to be checked\n        allow_new_measurements: bool\n            allow new measurements to be added to the product definition?\n            If not and the dataset contains measurements,\n            which are not defined in the product, an error is raised.\n\n        Returns\n        -------\n        \n        Raises\n        ------\n        RuntimeError\n        
\"\"\"\n        # check general metadata and storage fields\n        for attr in self.__fixture_metadata + self.__fixture_storage:\n            val_ds = getattr(dataset, attr)\n            val_prod = getattr(self, attr)\n            if val_prod is not None and val_ds != val_prod:\n                raise RuntimeError(\"mismatch of attribute '{0}': {1}, {2}\".format(attr, val_ds, val_prod))\n        \n        # check measurement fields\n        for measurement, content in dataset.measurements.items():\n            if measurement not in self.measurements.keys():\n                if not allow_new_measurements:\n                    raise RuntimeError(\"measurement '{}' is not present in the product definition \"\n                                       \"and allow_new_measurements is set to False\".format(measurement))\n            else:\n                match = self.measurements[measurement]\n                for attr in self.__fixture_measurement:\n                    if match[attr] != content[attr]:\n                        raise RuntimeError(\"mismatch of measurement '{0}', \"\n                                           \"attribute '{1}': {2}, {3}\".\n                                           format(measurement, attr, match[attr], content[attr]))\n    \n    def export_indexing_yml(self, dataset, outdir):\n        \"\"\"\n        Write a YML file named {:meth:`Dataset.identifier`}_dcindex.yml, which can be used for indexing a dataset in\n        an Open Data Cube. 
The file will contain information from the product and the dataset and a test is first\n        performed to check whether the dataset matches the product definition.\n        A unique ID is issued using :func:`uuid.uuid4()`.\n        \n        Parameters\n        ----------\n        dataset: Dataset\n            the dataset for which to export a file for\n        outdir: str\n            the directory to write the file to\n\n        Returns\n        -------\n\n        \"\"\"\n        \n        self.__validate()\n        \n        outname = os.path.join(outdir, dataset.identifier + '_dcindex.yml')\n        \n        if os.path.isfile(outname):\n            raise RuntimeError('indexing YML already exists: \\n   {}'.format(outname))\n        \n        if not os.path.isdir(outdir):\n            os.makedirs(outdir)\n        \n        self.check_integrity(dataset)\n        out = {'id': str(uuid.uuid4()),\n               'image': {'bands': {}},\n               'grid_spatial': {'projection': {}},\n               'extent': {'coord': {}},\n               'lineage': {'source_datasets': {}}}\n        \n        for measurement, content in dataset.measurements.items():\n            out['image']['bands'][measurement] = {'path': content['filename']}\n        \n        for attr in self.__fixture_metadata:\n            subkey = 'code' if attr == 'platform' else 'name'\n            out[attr] = {subkey: getattr(dataset, attr)}\n        \n        out['grid_spatial']['projection']['geo_ref_points'] = dataset.extent\n        out['grid_spatial']['projection']['spatial_reference'] = dataset.crs\n        \n        out['extent']['coord'] = dataset.extent_4326\n        out['extent']['from_dt'] = dataset.from_dt\n        out['extent']['to_dt'] = dataset.to_dt\n        \n        out['product_type'] = self.meta['metadata']['product_type']\n        \n        with open(outname, 'w') as yml:\n            yaml.dump(out, yml, default_flow_style=False)\n    \n    def export_ingestion_yml(self, 
outname, product_name, ingest_location, chunking):\n        \"\"\"\n        Write a YML file, which can be used for ingesting indexed datasets into an Open Data Cube.\n        \n        Parameters\n        ----------\n        outname: str\n            the name of the YML file to write\n        product_name: str\n            the name of the product in the ODC\n        ingest_location: str\n            the location of the ingested NetCDF files\n        chunking: dict\n            a dictionary with keys 'x', 'y' and 'time'; determines the size of the netCDF\n            files ingested into the datacube; e.g. {'x': 512, 'y': 512, 'time': 1}\n\n        Returns\n        -------\n\n        \"\"\"\n        if os.path.isfile(outname):\n            raise RuntimeError('product definition YML already exists: \\n   {}'.format(outname))\n        \n        self.__validate()\n        \n        if product_name == self.meta['name']:\n            raise ValueError('source and target product names must be different')\n\n        outdir = os.path.dirname(outname)\n        if not os.path.isdir(outdir):\n            os.makedirs(outdir)\n        \n        file_path_template = '{0}/{1}_{2}_{3}_{4}_' \\\n                             '{{tile_index[0]}}_' \\\n                             '{{tile_index[1]}}_' \\\n                             '{{start_time}}.nc'.format(product_name,\n                                                        self.platform,\n                                                        self.instrument,\n                                                        self.product_type,\n                                                        self.crs.replace('EPSG:', ''))\n        \n        global_attributes = {'instrument': self.instrument,\n                             'platform': self.platform,\n                             'institution': 'ESA',\n                             'achknowledgment': 'Sentinel-1 data is provided by the European Space Agency '\n                          
                      'on behalf of the European Commission via download.'}\n        \n        storage = self.meta['storage']\n        storage['driver'] = 'NetCDF CF'\n        storage['tile_size'] = {}\n        storage['tile_size']['x'] = storage['resolution']['x'] * chunking['x']\n        storage['tile_size']['y'] = storage['resolution']['y'] * chunking['y']\n        storage['chunking'] = chunking\n        storage['dimension_order'] = ['time', 'y', 'x']\n        \n        measurements = self.meta['measurements']\n        for measurement in measurements:\n            measurement['resampling_method'] = 'nearest'\n            measurement['src_varname'] = measurement['name']\n        \n        out = {'source_type': self.meta['name'],\n               'output_type': product_name,\n               'description': self.meta['description'],\n               'location': ingest_location,\n               'file_path_template': file_path_template,\n               'storage': self.meta['storage'],\n               'measurements': self.meta['measurements'],\n               'global_attributes': global_attributes}\n        \n        with open(outname, 'w') as yml:\n            yaml.dump(out, yml, default_flow_style=False)\n    \n    @property\n    def measurements(self):\n        \"\"\"\n        \n        Returns\n        -------\n        dict of dict\n            a dictionary with measurement names as keys\n        \"\"\"\n        return dict([(x['name'], x) for x in self.meta['measurements']])\n    \n    def write(self, ymlfile):\n        \"\"\"\n        write the product definition to a YML file\n        \n        Parameters\n        ----------\n        ymlfile: str\n            the file to write to\n        \n        Returns\n        -------\n        \n        \"\"\"\n        if os.path.isfile(ymlfile):\n            raise RuntimeError('ingestion YML already exists: \\n   {}'.format(ymlfile))\n        \n        self.__validate()\n        with open(ymlfile, 'w') as yml:\n            
yaml.dump(self.meta, yml, default_flow_style=False)\n"
  },
  {
    "path": "pyroSAR/drivers.py",
    "content": "###############################################################################\n# Reading and Organizing system for SAR images\n# Copyright (c) 2016-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\"\"\"\nThis is the core module of package pyroSAR.\nIt contains the drivers for the different SAR image formats and offers\nfunctionality for retrieving metadata, unpacking images, downloading ancillary files like DEMs and\nOrbit State Vector files as well as archiving scenes in a database.\nThe :class:`ID` class and its subclasses allow easy and standardized access to the metadata of\nimages from different SAR sensors.\n\"\"\"\nfrom __future__ import annotations\n\nfrom builtins import str\nfrom io import BytesIO\n\nimport abc\nimport ast\nimport math\nimport os\nimport re\nimport shutil\nimport struct\nimport operator\nimport tarfile as tf\nimport xml.etree.ElementTree as ET\nimport zipfile as zf\nfrom datetime import datetime, timezone, timedelta\nfrom dateutil.parser import parse as dateparse\nfrom time import strptime, strftime\nfrom statistics import mean, median\nfrom itertools import groupby\nfrom PIL import Image\n\nimport progressbar as pb\nfrom osgeo import gdal, osr, ogr\nfrom osgeo.gdalconst import GA_ReadOnly\nimport numpy as np\n\nfrom . 
import S1, patterns\nfrom .config import __LOCAL__\nfrom .ERS import passdb_query, get_resolution_nesz\nfrom .xml_util import getNamespaces\n\nfrom spatialist import crsConvert, Vector, bbox\nfrom spatialist.ancillary import parse_literal, finder, multicore\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef identify(scene):\n    \"\"\"\n    identify a SAR scene and return the appropriate metadata handler object\n\n    Parameters\n    ----------\n    scene: str\n        a file or directory name\n\n    Returns\n    -------\n    pyroSAR.drivers.ID\n        a pyroSAR metadata handler\n    \n    Examples\n    --------\n\n    >>> from pyroSAR import identify\n    >>> filename = 'S1A_IW_GRDH_1SDV_20180829T170656_20180829T170721_023464_028DE0_F7BD.zip'\n    >>> scene = identify(filename)\n    >>> print(scene)\n    pyroSAR ID object of type SAFE\n    acquisition_mode: IW\n    cycleNumber: 148\n    frameNumber: 167392\n    lines: 16703\n    orbit: A\n    orbitNumber_abs: 23464\n    orbitNumber_rel: 117\n    polarizations: ['VV', 'VH']\n    product: GRD\n    projection: +proj=longlat +datum=WGS84 +no_defs\n    samples: 26056\n    sensor: S1A\n    spacing: (10.0, 10.0)\n    start: 20180829T170656\n    stop: 20180829T170721\n    \"\"\"\n    if not os.path.exists(scene):\n        raise OSError(\"No such file or directory: '{}'\".format(scene))\n    \n    def get_subclasses(c):\n        subclasses = c.__subclasses__()\n        for subclass in subclasses.copy():\n            subclasses.extend(get_subclasses(subclass))\n        return list(set(subclasses))\n    \n    for handler in get_subclasses(ID):\n        try:\n            return handler(scene)\n        except Exception:\n            pass\n    raise RuntimeError('data format not supported')\n\n\ndef identify_many(scenes, pbar=False, sortkey=None, cores=1):\n    \"\"\"\n    wrapper function for returning metadata handlers of all valid scenes in a list,\n    similar to function :func:`~pyroSAR.drivers.identify`.\n\n 
   Parameters\n    ----------\n    scenes: list[str or ID]\n        the file names of the scenes to be identified\n    pbar: bool\n        adds a progressbar if True\n    sortkey: str or None\n        sort the handler object list by an attribute\n    cores: int\n        the number of cores to parallelize identification\n    \n    Returns\n    -------\n    list[ID]\n        a list of pyroSAR metadata handlers\n    \n    Examples\n    --------\n    >>> from pyroSAR import identify_many\n    >>> files = finder('/path', ['S1*.zip'])\n    >>> ids = identify_many(files, pbar=False, sortkey='start')\n    \"\"\"\n    \n    def handler(scene):\n        if isinstance(scene, ID):\n            return scene\n        else:\n            try:\n                id = identify(scene)\n                return id\n            except RuntimeError:\n                return None\n            except PermissionError:\n                log.warning(\"Permission denied: '{}'\".format(scene))\n    \n    if cores == 1:\n        idlist = []\n        if pbar:\n            progress = pb.ProgressBar(max_value=len(scenes)).start()\n        else:\n            progress = None\n        for i, scene in enumerate(scenes):\n            id = handler(scene)\n            idlist.append(id)\n            if progress is not None:\n                progress.update(i + 1)\n        if progress is not None:\n            progress.finish()\n    else:\n        idlist = multicore(function=handler, multiargs={'scene': scenes},\n                           pbar=pbar, cores=cores)\n    if sortkey is not None:\n        idlist.sort(key=operator.attrgetter(sortkey))\n    idlist = list(filter(None, idlist))\n    return idlist\n\n\ndef filter_processed(scenelist, outdir, recursive=False):\n    \"\"\"\n    Filter a list of pyroSAR objects to those that have not yet been processed and stored in the defined directory.\n    The search for processed scenes is either done in the directory only or recursively into subdirectories.\n    The 
scenes must have been processed with pyroSAR in order to follow the right naming scheme.\n\n    Parameters\n    ----------\n    scenelist: list[ID]\n        a list of pyroSAR objects\n    outdir: str\n        the processing directory\n    recursive: bool\n        scan `outdir` recursively into subdirectories?\n\n    Returns\n    -------\n    list[ID]\n        a list of those scenes, which have not been processed yet\n    \"\"\"\n    return [x for x in scenelist if not x.is_processed(outdir, recursive)]\n\n\nclass ID(object):\n    \"\"\"\n    Abstract class for SAR meta data handlers\n    \"\"\"\n    \n    def __init__(self, metadict):\n        \"\"\"\n        to be called by the __init__ methods of the format drivers\n        scans a metadata dictionary and registers entries with a standardized name as object attributes\n        see __LOCAL__ for standard names. It must be ensured that each of these is actually read by the individual SAR format driver.\n\n        :param metadict: a dictionary containing the metadata attributes of a SAR scene\n        \"\"\"\n        self.locals = __LOCAL__\n        for item in self.locals:\n            setattr(self, item, metadict[item])\n    \n    def __getattr__(self, item):\n        raise AttributeError(\"object has no attribute '{}'\".format(item))\n    \n    def __str__(self):\n        lines = ['pyroSAR ID object of type {}'.format(self.__class__.__name__)]\n        for item in sorted(self.locals):\n            value = getattr(self, item)\n            if item == 'projection':\n                value = crsConvert(value, 'proj4') if value is not None else None\n            if value == -1:\n                value = '<no global value per product>'\n            line = '{0}: {1}'.format(item, value)\n            lines.append(line)\n        return '\\n'.join(lines)\n    \n    def bbox(self, outname=None, driver=None, overwrite=True, buffer=None):\n        \"\"\"\n        get the bounding box of a scene. 
The result is either returned as\n        vector object or written to a file.\n\n        Parameters\n        ----------\n        outname: str\n            the name of the vector file to be written\n        driver: str\n            the output file format; needs to be defined if the format cannot\n            be auto-detected from the filename extension\n        overwrite: bool\n            overwrite an existing vector file?\n        buffer: None or int or float or tuple[int or float]\n            a buffer to add around `coordinates`. Default None: do not add\n            a buffer. A tuple is interpreted as (x buffer, y buffer).\n\n        Returns\n        -------\n        ~spatialist.vector.Vector or None\n            the vector object if `outname` is None and None otherwise\n        \n        See Also\n        --------\n        spatialist.vector.Vector.bbox\n        \"\"\"\n        if outname is None:\n            return bbox(coordinates=self.getCorners(), crs=self.projection,\n                        buffer=buffer)\n        else:\n            bbox(coordinates=self.getCorners(), crs=self.projection,\n                 outname=outname, driver=driver, overwrite=overwrite,\n                 buffer=buffer)\n    \n    def geometry(self, outname=None, driver=None, overwrite=True):\n        \"\"\"\n        get the footprint geometry of a scene either as a vector object or written to a file\n\n        Parameters\n        ----------\n        outname: str\n            the name of the vector file to be written\n        driver: str\n            the output file format; needs to be defined if the format cannot\n            be auto-detected from the filename extension\n        overwrite: bool\n            overwrite an existing vector file?\n\n        Returns\n        -------\n        ~spatialist.vector.Vector or None\n            the vector object if `outname` is None, None otherwise\n        \n        See also\n        --------\n        spatialist.vector.Vector.write\n        
\"\"\"\n        if 'coordinates' not in self.meta.keys():\n            raise NotImplementedError\n        srs = crsConvert(self.projection, 'osr')\n        points = ogr.Geometry(ogr.wkbMultiPoint)\n        for lon, lat in self.meta['coordinates']:\n            point = ogr.Geometry(ogr.wkbPoint)\n            point.AddPoint(lon, lat)\n            points.AddGeometry(point)\n        geom = points.ConvexHull()\n        geom.FlattenTo2D()\n        point = points = None\n        exterior = geom.GetGeometryRef(0)\n        if exterior.IsClockwise():\n            points = list(exterior.GetPoints())\n            exterior.Empty()\n            for x, y in reversed(points):\n                exterior.AddPoint(x, y)\n            geom.CloseRings()\n        exterior = points = None\n        \n        bbox = Vector(driver='MEM')\n        bbox.addlayer('geometry', srs, geom.GetGeometryType())\n        bbox.addfield('area', ogr.OFTReal)\n        bbox.addfeature(geom, fields={'area': geom.Area()})\n        geom = None\n        if outname is None:\n            return bbox\n        else:\n            bbox.write(outfile=outname, driver=driver, overwrite=overwrite)\n    \n    @property\n    def compression(self):\n        \"\"\"\n        check whether a scene is compressed into an tarfile or zipfile or not at all\n\n        Returns\n        -------\n        str or None\n            either 'zip', 'tar' or None\n        \"\"\"\n        if os.path.isdir(self.scene):\n            return None\n        elif zf.is_zipfile(self.scene):\n            return 'zip'\n        elif tf.is_tarfile(self.scene):\n            return 'tar'\n        else:\n            return None\n    \n    def export2dict(self):\n        \"\"\"\n        Return the uuid and the metadata that is defined in `self.locals` as a dictionary\n        \"\"\"\n        metadata = {item: self.meta[item] for item in self.locals}\n        sq_file = os.path.basename(self.file)\n        title = os.path.splitext(sq_file)[0]\n        
metadata['uuid'] = title\n        return metadata\n    \n    def examine(self, include_folders=False):\n        \"\"\"\n        check whether any items in the SAR scene structure (i.e. files/folders) match the regular expression pattern\n        defined by the class. On success the item is registered in the object as attribute `file`.\n\n        Parameters\n        ----------\n        include_folders: bool\n            also match folder (or just files)?\n\n        Returns\n        -------\n\n        Raises\n        -------\n        RuntimeError\n        \"\"\"\n        files = self.findfiles(self.pattern, include_folders=include_folders)\n        if len(files) == 1:\n            self.file = files[0]\n        elif len(files) == 0:\n            raise RuntimeError('scene does not match {} naming convention'.format(type(self).__name__))\n        else:\n            raise RuntimeError('file ambiguity detected:\\n{}'.format('\\n'.join(files)))\n    \n    def findfiles(self, pattern, include_folders=False):\n        \"\"\"\n        find files in the scene archive, which match a pattern.\n\n        Parameters\n        ----------\n        pattern: str\n            the regular expression to match\n        include_folders: bool\n             also match folders (or just files)?\n        Returns\n        -------\n        list[str]\n            the matched file names\n        \n        See Also\n        --------\n        :func:`spatialist.ancillary.finder`\n        \"\"\"\n        foldermode = 1 if include_folders else 0\n        \n        try:\n            files = finder(target=self.scene, matchlist=[pattern],\n                           foldermode=foldermode, regex=True)\n        except RuntimeError:\n            # Return the scene if only a file and not zip\n            return self.scene\n        \n        if os.path.isdir(self.scene) \\\n                and re.search(pattern, os.path.basename(self.scene)) \\\n                and include_folders:\n            
files.append(self.scene)\n        \n        return files\n    \n    def gdalinfo(self):\n        \"\"\"\n        read metadata directly from the GDAL SAR image drivers\n\n        Returns\n        -------\n        dict\n            the metadata attributes\n        \"\"\"\n        files = self.findfiles(r'(?:\\.[NE][12]$|DAT_01\\.001$|product\\.xml|manifest\\.safe$)')\n        # If only one file return the file in array\n        if isinstance(files, str):\n            files = [files]\n        \n        if len(files) == 1:\n            prefix = {'zip': '/vsizip/', 'tar': '/vsitar/', None: ''}[self.compression]\n            header = files[0]\n        elif len(files) > 1:\n            raise RuntimeError('file ambiguity detected')\n        else:\n            raise RuntimeError('file type not supported')\n        \n        meta = {}\n        \n        ext_lookup = {'.N1': 'ASAR', '.E1': 'ERS1', '.E2': 'ERS2'}\n        extension = os.path.splitext(header)[1]\n        if extension in ext_lookup:\n            meta['sensor'] = ext_lookup[extension]\n            info = gdal.Info(prefix + header, options=gdal.InfoOptions(allMetadata=True, format='json'))\n            meta['extra'] = info\n        \n        img = gdal.Open(prefix + header, GA_ReadOnly)\n        gdalmeta = img.GetMetadata()\n        meta['samples'], meta['lines'], meta['bands'] = img.RasterXSize, img.RasterYSize, img.RasterCount\n        meta['projection'] = img.GetGCPProjection()\n        meta['gcps'] = [((x.GCPPixel, x.GCPLine), (x.GCPX, x.GCPY, x.GCPZ)) for x in img.GetGCPs()]\n        img = None\n        \n        for item in gdalmeta:\n            entry = [item, parse_literal(gdalmeta[item].strip())]\n            \n            try:\n                entry[1] = self.parse_date(str(entry[1]))\n            except ValueError:\n                pass\n            \n            if re.search('LAT|LONG', entry[0]):\n                entry[1] /= 1000000.\n            meta[entry[0]] = entry[1]\n        return meta\n    \n 
    def getCorners(self):
        """
        Get the bounding box corner coordinates

        Returns
        -------
        dict
            the corner coordinates as a dictionary with keys `xmin`, `ymin`, `xmax`, `ymax`
        """
        if 'coordinates' not in self.meta.keys():
            raise NotImplementedError
        # meta['coordinates'] holds (longitude, latitude) tuples
        coordinates = self.meta['coordinates']
        lat = [x[1] for x in coordinates]
        lon = [x[0] for x in coordinates]
        return {'xmin': min(lon), 'xmax': max(lon), 'ymin': min(lat), 'ymax': max(lat)}
    
    def getFileObj(self, filename):
        """
        Load a file into a readable file object.

        Parameters
        ----------
        filename: str
            the name of a file in the scene archive, easiest to get with method :meth:`~ID.findfiles`

        Returns
        -------
        io.BytesIO
            a file pointer object
        """
        # delegates to the module-level getFileObj, which handles archives
        return getFileObj(self.scene, filename)
    
    def getGammaImages(self, directory=None):
        """
        list all files processed by GAMMA

        Parameters
        ----------
        directory: str or None
            the directory to be scanned; if left empty the object attribute `gammadir` is scanned

        Returns
        -------
        list[str]
            the file names of the images processed by GAMMA

        Raises
        -------
        RuntimeError
        """
        if directory is None:
            if hasattr(self, 'gammadir'):
                directory = self.gammadir
            else:
                raise RuntimeError(
                    'directory missing; please provide directory to function or define object attribute "gammadir"')
        # exclude auxiliary files (parameter, header, swap and script files)
        return [x for x in finder(directory, [self.outname_base()], regex=True) if
                not re.search(r'\.(?:par|hdr|aux\.xml|swp|sh)$', x)]
    
    def getHGT(self):
        """
        get the names of all SRTM
HGT tiles overlapping with the SAR scene\n\n        Returns\n        -------\n        list[str]\n            names of the SRTM HGT tiles\n        \"\"\"\n        \n        corners = self.getCorners()\n        \n        # generate sequence of integer coordinates marking the tie points of the overlapping hgt tiles\n        lat = range(int(float(corners['ymin']) // 1), int(float(corners['ymax']) // 1) + 1)\n        lon = range(int(float(corners['xmin']) // 1), int(float(corners['xmax']) // 1) + 1)\n        \n        # convert coordinates to string with leading zeros and hemisphere identification letter\n        lat = [str(x).zfill(2 + len(str(x)) - len(str(x).strip('-'))) for x in lat]\n        lat = [x.replace('-', 'S') if '-' in x else 'N' + x for x in lat]\n        \n        lon = [str(x).zfill(3 + len(str(x)) - len(str(x).strip('-'))) for x in lon]\n        lon = [x.replace('-', 'W') if '-' in x else 'E' + x for x in lon]\n        \n        # concatenate all formatted latitudes and longitudes with each other as final product\n        return [x + y + '.hgt' for x in lat for y in lon]\n    \n    def is_processed(self, outdir, recursive=False):\n        \"\"\"\n        check whether a scene has already been processed and stored in the defined output directory\n        (and subdirectories if scanned recursively)\n\n        Parameters\n        ----------\n        outdir: str\n            the directory to be checked\n\n        Returns\n        -------\n        bool\n            does an image matching the scene pattern exist?\n        \"\"\"\n        if os.path.isdir(outdir):\n            # '{}.*tif$'.format(self.outname_base())\n            return len(finder(outdir, [self.outname_base()], regex=True, recursive=recursive)) != 0\n        else:\n            return False\n    \n    def outname_base(self, extensions=None):\n        \"\"\"\n        parse a string containing basic information about the scene in standardized format.\n        Currently, this id contains the 
sensor (4 digits), acquisition mode (4 digits), orbit (1 digit)\n        and acquisition start time (15 digits)., e.g. `S1A__IW___A_20150523T122350`.\n        \n        Parameters\n        ----------\n        extensions: list[str] or None\n            the names of additional parameters to append to the basename, e.g. ``['orbitNumber_rel']``\n        Returns\n        -------\n        str\n            a standardized name unique to the scene\n            \n        \"\"\"\n        \n        fields = ('{:_<4}'.format(self.sensor),\n                  '{:_<4}'.format(self.acquisition_mode),\n                  self.orbit,\n                  self.start)\n        out = '_'.join(fields)\n        if isinstance(extensions, list) and len(extensions) is not None:\n            ext = '_'.join([str(getattr(self, key)) for key in extensions])\n            out += '_' + ext\n        return out\n    \n    @staticmethod\n    def parse_date(x):\n        \"\"\"\n        this function gathers known time formats provided in the different SAR products and converts them to a common\n        standard of the form YYYYMMDDTHHMMSS.\n\n        Parameters\n        ----------\n        x: str\n            the time stamp\n\n        Returns\n        -------\n        str\n            the converted time stamp in format YYYYmmddTHHMMSS\n        \"\"\"\n        return parse_date(x)\n    \n    @abc.abstractmethod\n    def quicklook(self, outname, format='kmz'):\n        \"\"\"\n        export a quick look image of the scene\n\n        Parameters\n        ----------\n        outname: str\n            the name of the output file\n        format: str\n            the format of the file to write;\n            currently only kmz is supported\n\n        Returns\n        -------\n\n        Examples\n        --------\n\n        >>> from pyroSAR import identify\n        >>> scene = identify('S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip')\n        >>> 
scene.quicklook('S1A__IW___A_20180101T170648.kmz')
        """
        raise NotImplementedError
    
    @property
    def start_dt(self) -> datetime:
        """
        
        Returns
        -------
            the acquisition start time as timezone-aware datetime object
        """
        # self.start is a 'YYYYmmddTHHMMSS' string (see parse_date); interpreted as UTC
        out = datetime.strptime(self.start, '%Y%m%dT%H%M%S')
        return out.replace(tzinfo=timezone.utc)
    
    @property
    def stop_dt(self) -> datetime:
        """
        
        Returns
        -------
            the acquisition stop time as timezone-aware datetime object
        """
        # self.stop is a 'YYYYmmddTHHMMSS' string (see parse_date); interpreted as UTC
        out = datetime.strptime(self.stop, '%Y%m%dT%H%M%S')
        return out.replace(tzinfo=timezone.utc)
    
    def summary(self):
        """
        print the set of standardized scene metadata attributes

        Returns
        -------

        """
        print(self.__str__())
    
    @abc.abstractmethod
    def scanMetadata(self):
        """
        scan SAR scenes for metadata attributes.
        The returned dictionary is registered as attribute `meta` by the class upon object initialization.
        This dictionary furthermore needs to return a set of standardized attribute keys,
        which are directly registered as object attributes.

        Returns
        -------
        dict
            the derived attributes

        """
        raise NotImplementedError
    
    @abc.abstractmethod
    def unpack(self, directory, overwrite=False, exist_ok=False):
        """
        Unpack the SAR scene into a defined directory.

        Parameters
        ----------
        directory: str
            the base directory into which the scene is unpacked
        overwrite: bool
            overwrite an existing unpacked scene?
        exist_ok: bool
            allow existing output files and do not create new ones?

        Returns
        -------

        """
        raise
NotImplementedError
    
    def _unpack(self, directory, offset=None, overwrite=False, exist_ok=False):
        """
        general function for unpacking scene archives; to be called by implementations of ID.unpack.
        Will reset object attributes `scene` and `file` to point to the locations of the unpacked scene
        
        Parameters
        ----------
        directory: str
            the name of the directory in which the files are written
        offset: str
            an archive directory offset; to be defined if only a subdirectory is to be unpacked (see e.g. TSX.unpack)
        overwrite: bool
            should an existing directory be overwritten?
        exist_ok: bool
            do not attempt unpacking if the target directory already exists? Ignored if ``overwrite==True``
        
        Returns
        -------
        
        """
        do_unpack = True
        if os.path.isdir(directory):
            if overwrite:
                shutil.rmtree(directory)
            else:
                if exist_ok:
                    do_unpack = False
                else:
                    raise RuntimeError('target scene directory already exists: {}'.format(directory))
        os.makedirs(directory, exist_ok=True)
        
        if do_unpack:
            if tf.is_tarfile(self.scene):
                archive = tf.open(self.scene, 'r')
                names = archive.getnames()
                # restrict to the requested archive subdirectory
                if offset is not None:
                    names = [x for x in names if x.startswith(offset)]
                header = os.path.commonprefix(names)
                
                # NOTE(review): if the members share no single common entry
                # (header not in names), this branch extracts nothing and the
                # archive handle is left open — confirm whether this is intended
                if header in names:
                    if archive.getmember(header).isdir():
                        # extract each member except the common top-level entry itself;
                        # only the optional offset prefix is stripped from member paths
                        for item in sorted(names):
                            if item != header:
                                member = archive.getmember(item)
                                if offset is not None:
                                    member.name = member.name.replace(offset + '/', '')
                                archive.extract(member, directory)
                        archive.close()
                    else:
                        archive.extractall(directory)
                        archive.close()
            
            elif zf.is_zipfile(self.scene):
                archive = zf.ZipFile(self.scene, 'r')
                names = archive.namelist()
                header = os.path.commonprefix(names)
                # a common prefix ending in '/' means a single top-level folder;
                # strip it so the scene content sits directly in 'directory'
                if header.endswith('/'):
                    for item in sorted(names):
                        if item != header:
                            repl = item.replace(header, '', 1)
                            outname = os.path.join(directory, repl)
                            outname = outname.replace('/', os.path.sep)
                            if item.endswith('/'):
                                os.makedirs(outname, exist_ok=True)
                            else:
                                os.makedirs(os.path.dirname(outname), exist_ok=True)
                                # best-effort: skip unreadable members of a corrupt archive
                                try:
                                    with open(outname, 'wb') as outfile:
                                        outfile.write(archive.read(item))
                                except zf.BadZipfile:
                                    log.info('corrupt archive, unpacking failed')
                                    continue
                    archive.close()
                else:
                    archive.extractall(directory)
                    archive.close()
            else:
                log.info('unpacking is only supported for TAR and ZIP archives')
                return
        
        # re-point the object to the unpacked location
        self.scene = directory
        main = os.path.join(self.scene, os.path.basename(self.file))
        self.file = main if os.path.isfile(main) else self.scene


class BEAM_DIMAP(ID):
    """
    Handler class for BEAM-DIMAP data

    
Sensors:
        * SNAP supported sensors
    """
    
    def __init__(self, scene):
        
        if not scene.lower().endswith('.dim'):
            raise RuntimeError('Scene format is not BEAM-DIMAP')
        
        self.root = None
        self.scene = scene
        self.meta = self.scanMetadata()
        
        super(BEAM_DIMAP, self).__init__(self.meta)
    
    def scanMetadata(self):
        meta = dict()
        
        self.root = ET.parse(self.scene).getroot()
        
        def get_by_name(attr: list[str] | str, section: str = 'Abstracted_Metadata') -> str:
            # read an MDATTR value from the given MDElem section;
            # a list input tries each candidate attribute name in turn
            msg = 'cannot get attribute "{}" from section "{}"'
            if isinstance(attr, list):
                for i, item in enumerate(attr):
                    try:
                        return get_by_name(item, section=section)
                    except RuntimeError:
                        continue
                raise RuntimeError(msg.format('|'.join(attr), section))
            else:
                element = self.root.find(f'.//MDElem[@name="{section}"]')
                out = element.find(f'.//MDATTR[@name="{attr}"]')
                # '99999'/'99999.0' are treated as no-data sentinels
                if out is None or out.text in ['99999', '99999.0']:
                    raise RuntimeError(msg.format(attr, section))
                return out.text
        
        # map metadata MISSION identifiers to pyroSAR sensor names
        missions = {'ENVISAT': 'ASAR',
                    'ERS1': 'ERS1',
                    'ERS2': 'ERS2',
                    'SENTINEL-1A': 'S1A',
                    'SENTINEL-1B': 'S1B',
                    'SENTINEL-1C': 'S1C',
                    'SENTINEL-1D': 'S1D'}
        
        section = 'Abstracted_Metadata'
        meta['sensor'] = missions[get_by_name('MISSION', section=section)]
        if re.search('S1[A-Z]', meta['sensor']):
            meta['acquisition_mode'] = get_by_name('ACQUISITION_MODE', section=section)
            meta['product'] = self.root.find('.//PRODUCT_TYPE').text
        elif meta['sensor'] in ['ASAR', 'ERS1', 'ERS2']:
            # acquisition mode is encoded in characters 5-7 of the product type
            product_type = get_by_name('PRODUCT_TYPE', section=section)
            meta['acquisition_mode'] = product_type[4:7]
            # product overview table: https://doi.org/10.5167/UZH-96146
            if meta['acquisition_mode'] in ['APS', 'IMS', 'WSS']:
                meta['product'] = 'SLC'
            elif meta['acquisition_mode'] in ['APP', 'IMP']:
                meta['product'] = 'PRI'
            elif meta['acquisition_mode'] in ['APM', 'IMM', 'WSM']:
                meta['product'] = 'MR'
            else:
                raise RuntimeError(f"unsupported acquisition mode: '{meta['acquisition_mode']}'")
        else:
            raise RuntimeError('unknown sensor {}'.format(meta['sensor']))
        
        meta['IPF_version'] = get_by_name('Processing_system_identifier', section=section)
        
        # first character of PASS, i.e. 'A'scending or 'D'escending
        meta['orbit'] = get_by_name('PASS', section=section)[0]
        pols = [x.text for x in self.root.findall('.//MDATTR[@desc="Polarization"]')]
        pols = list(filter(None, pols))
        # drop compound entries containing '-' and de-duplicate
        meta['polarizations'] = list(set([x for x in pols if '-' not in x]))
        meta['spacing'] = (round(float(get_by_name('range_spacing', section=section)), 6),
                           round(float(get_by_name('azimuth_spacing', section=section)), 6))
        meta['looks'] = (float(get_by_name('range_looks', section=section)),
                         float(get_by_name('azimuth_looks', section=section)))
        meta['samples'] = int(self.root.find('.//BAND_RASTER_WIDTH').text)
        meta['lines'] = int(self.root.find('.//BAND_RASTER_HEIGHT').text)
        meta['bands'] = int(self.root.find('.//NBANDS').text)
        meta['orbitNumber_abs'] = int(get_by_name('ABS_ORBIT', section=section))
        meta['orbitNumber_rel'] = int(get_by_name('REL_ORBIT', section=section))
        meta['cycleNumber'] = int(get_by_name(['orbit_cycle', 'CYCLE'], section=section))
        
meta['frameNumber'] = int(get_by_name(['data_take_id', 'ABS_ORBIT'], section=section))
        
        meta['swath'] = get_by_name('SWATH', section=section)
        
        srgr = bool(int(get_by_name('srgr_flag', section=section)))
        meta['image_geometry'] = 'GROUND_RANGE' if srgr else 'SLANT_RANGE'
        #################################################################################
        # start, stop
        start = datetime.strptime(self.root.find('.//PRODUCT_SCENE_RASTER_START_TIME').text,
                                  '%d-%b-%Y %H:%M:%S.%f')
        meta['start'] = start.strftime('%Y%m%dT%H%M%S')
        stop = datetime.strptime(self.root.find('.//PRODUCT_SCENE_RASTER_STOP_TIME').text,
                                 '%d-%b-%Y %H:%M:%S.%f')
        meta['stop'] = stop.strftime('%Y%m%dT%H%M%S')
        #################################################################################
        # incident angle
        # the incident angle is not stored consistently so several options are tried
        # each strategy breaks out of the loop on success; the final raise fires if all fail
        while True:
            # may be missing or set to '99999.0'
            try:
                inc_near = get_by_name('incidence_near', section=section)
                inc_far = get_by_name('incidence_far', section=section)
                incidence = (float(inc_near) + float(inc_far)) / 2
                break
            except RuntimeError:
                pass
            # this attribute might only apply to Sentinel-1
            inc_elements = self.root.findall('.//MDATTR[@name="incidenceAngleMidSwath"]')
            if len(inc_elements) > 0:
                incidence = [float(x.text) for x in inc_elements]
                incidence = mean(incidence)
                break
            # the tie point grids are no longer present in geocoded products
            inc_grid = os.path.join(self.scene.replace('.dim', '.data'),
                                    'tie_point_grids', 'incident_angle.img')
            if os.path.isfile(inc_grid):
                ras = gdal.Open(inc_grid)
                arr = ras.ReadAsArray()
                # mean over valid (non-zero) grid values
                incidence = np.mean(arr[arr != 0])
                ras = arr = None
                break
            raise ValueError('cannot read the incident angle')
        meta['incidence'] = incidence
        #################################################################################
        # projection
        if self.root.find('.//WKT') is not None:
            meta['projection'] = self.root.find('.//WKT').text.lstrip()
        else:
            meta['projection'] = crsConvert(4326, 'wkt')
        #################################################################################
        # coordinates
        keys = ['{}_{}_{}'.format(a, b, c)
                for a in ['first', 'last']
                for b in ['far', 'near']
                for c in ['lat', 'long']]
        coords = {key: float(get_by_name(key, section=section))
                  for key in keys}
        
        # corner order: first-near, last-near, last-far, first-far as (lon, lat)
        meta['coordinates'] = [(coords['first_near_long'], coords['first_near_lat']),
                               (coords['last_near_long'], coords['last_near_lat']),
                               (coords['last_far_long'], coords['last_far_lat']),
                               (coords['first_far_long'], coords['first_far_lat'])]
        #################################################################################
        return meta
    
    def unpack(self, directory, overwrite=False, exist_ok=False):
        raise RuntimeError('unpacking of BEAM-DIMAP products is not supported')


class CEOS_ERS(ID):
    """
    Handler class for ERS data in CEOS format
    
    Sensors:
        * ERS1
        * ERS2
    
    Reference:
        ER-IS-EPO-GS-5902-3: Annex C. ERS SAR.SLC/SLC-I.
CCT and EXABYTE
        (`ESA 1998 <https://earth.esa.int/documents/10174/1597298/SAR05E.pdf>`_)
    """
    
    def __init__(self, scene):
        self.pattern = patterns.ceos_ers
        
        # pattern of the product ID, e.g. 'SAR_IMP_1P'
        self.pattern_pid = r'(?P<sat_id>(?:SAR|ASA))_' \
                           r'(?P<image_mode>(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_)))_' \
                           r'(?P<processing_level>[012B][CP])'
        
        self.scene = os.path.realpath(scene)
        
        self.examine()
        
        self.meta = self.scanMetadata()
        
        # register the standardized meta attributes as object attributes
        super(CEOS_ERS, self).__init__(self.meta)
    
    def unpack(self, directory, overwrite=False, exist_ok=False):
        if self.sensor in ['ERS1', 'ERS2']:
            base_file = re.sub(r'\.PS$', '', os.path.basename(self.file))
            base_dir = os.path.basename(directory.strip('/'))
            
            # avoid duplicating the scene folder name in the output path
            outdir = directory if base_file == base_dir else os.path.join(directory, base_file)
            
            self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)
        else:
            raise NotImplementedError('sensor {} not implemented yet'.format(self.sensor))
    
    def scanMetadata(self):
        meta = dict()
        
        match = re.match(re.compile(self.pattern), os.path.basename(self.file))
        match2 = re.match(re.compile(self.pattern_pid), match.group('product_id'))
        
        if re.search('IM__0', match.group('product_id')):
            raise RuntimeError('product level 0 not supported (yet)')
        
        meta['acquisition_mode'] = match2.group('image_mode')
        meta['product'] = 'SLC' if meta['acquisition_mode'] in ['IMS', 'APS', 'WSS'] else 'PRI'
        
        # the leader file carries the binary metadata records; byte offsets below
        # follow the CEOS layout referenced in the class docstring
        lea_obj = self.getFileObj(self.findfiles('LEA_01.001')[0])
        lea = lea_obj.read()
        lea_obj.close()
        fdr = lea[0:720]  # file descriptor record
        dss = lea[720:(720 + 1886)]  # data set summary record
        mpd = lea[(720 + 1886):(720 + 1886 + 1620)]  # map projection data record
        ppd_start = 720 + 1886 + 1620
        ppd_length = struct.unpack('>i', lea[ppd_start + 8: ppd_start + 12])[0]
        # NOTE(review): the slice end is a record length, not an absolute offset;
        # 'lea[ppd_start:ppd_start + ppd_length]' seems intended. Currently harmless
        # because 'ppd' is not used below ('frd_start' adds the length correctly).
        ppd = lea[ppd_start:ppd_length]  # platform position data record
        frd_start = 720 + 1886 + 1620 + ppd_length
        frd = lea[frd_start:(frd_start + 12288)]  # facility related data record
        
        meta['sensor'] = dss[396:412].strip().decode()
        meta['start'] = self.parse_date(str(dss[1814:1838].decode('utf-8')))
        meta['stop'] = self.parse_date(str(dss[1862:1886].decode('utf-8')))
        meta['polarizations'] = ['VV']
        looks_range = float(dss[1174:1190])
        looks_azimuth = float(dss[1190:1206])
        meta['looks'] = (looks_range, looks_azimuth)
        meta['heading'] = float(dss[468:476])
        # heading > 180 degrees implies a descending pass
        meta['orbit'] = 'D' if meta['heading'] > 180 else 'A'
        orbitNumber, frameNumber = map(int, re.findall('[0-9]+', dss[36:68].decode('utf-8')))
        meta['orbitNumber_abs'] = orbitNumber
        meta['frameNumber'] = frameNumber
        orbitInfo = passdb_query(meta['sensor'], datetime.strptime(meta['start'], '%Y%m%dT%H%M%S'))
        meta['cycleNumber'] = orbitInfo['cycleNumber']
        meta['orbitNumber_rel'] = orbitInfo['orbitNumber_rel']
        spacing_azimuth = float(dss[1686:1702])
        spacing_range = float(dss[1702:1718])
        meta['spacing'] = (spacing_range, spacing_azimuth)
        meta['incidence_angle'] = float(dss[484:492])
        meta['proc_facility'] = dss[1045:1061].strip().decode()
        meta['proc_system'] = dss[1061:1069].strip().decode()
        meta['proc_version'] = dss[1069:1077].strip().decode()
        
        meta['antenna_flag'] = int(frd[658:662])
        meta['k_db'] = -10 * math.log(float(frd[662:678]), 10)
        meta['sc_db'] = {'ERS1': 59.61, 'ERS2':
60}[meta['sensor']]
        
        meta['samples'] = int(mpd[60:76])
        meta['lines'] = int(mpd[76:92])
        # corner coordinates as (lon, lat) tuples: upper-left, upper-right,
        # lower-right, lower-left
        ul = (float(mpd[1088:1104]), float(mpd[1072:1088]))
        ur = (float(mpd[1120:1136]), float(mpd[1104:1120]))
        lr = (float(mpd[1152:1168]), float(mpd[1136:1152]))
        ll = (float(mpd[1184:1200]), float(mpd[1168:1184]))
        meta['coordinates'] = [ul, ur, lr, ll]
        meta['projection'] = crsConvert(4326, 'wkt')
        return meta
        
        # def correctAntennaPattern(self):
        # the following section is only relevant for PRI products and can be considered future work
        # select antenna gain correction lookup file from extracted meta information
        # the lookup files are stored in a subfolder CAL which is included in the pythonland software package
        # if sensor == 'ERS1':
        #     if date < 19950717:
        #         antenna = 'antenna_ERS1_x_x_19950716'
        #     else:
        #         if proc_sys == 'VMP':
        #             antenna = 'antenna_ERS2_VMP_v68_x' if proc_vrs >= 6.8 else 'antenna_ERS2_VMP_x_v67'
        #         elif proc_fac == 'UKPAF' and date < 19970121:
        #             antenna = 'antenna_ERS1_UKPAF_19950717_19970120'
        #         else:
        #             antenna = 'antenna_ERS1'
        # else:
        #     if proc_sys == 'VMP':
        #         antenna = 'antenna_ERS2_VMP_v68_x' if proc_vrs >= 6.8 else 'antenna_ERS2_VMP_x_v67'
        #     elif proc_fac == 'UKPAF' and date < 19970121:
        #         antenna = 'antenna_ERS2_UKPAF_x_19970120'
        #     else:
        #         antenna = 'antenna_ERS2'


class CEOS_PSR(ID):
    """
    Handler class for ALOS-PALSAR data in CEOS format
    
    Sensors:
        * PSR1
        * PSR2

    PALSAR-1:
        References:
            * NEB-01006: ALOS/PALSAR Level 1 Product Format Description
              (`JAXA 2006 <https://www.eorc.jaxa.jp/ALOS/en/doc/fdata/PALSAR_L10_J_ENa.zip>`_)
            * NEB-070062B: ALOS/PALSAR Level 1.1/1.5 Product Format Description
              (`JAXA 2009 <https://www.eorc.jaxa.jp/ALOS/en/doc/fdata/PALSAR_x_Format_EL.pdf>`_)
        Products / processing levels:
            * 1.0
            * 1.1
            * 1.5
        Acquisition modes:
            * AB: [SP][HWDPC]
            * A: supplemental remarks of the sensor type:
                * S: Wide observation mode
                * P: all other modes
            * B: observation mode
                * H: Fine mode
                * W: ScanSAR mode
                * D: Direct downlink mode
                * P: Polarimetry mode
                * C: Calibration mode
    
    PALSAR-2:
        Reference:
            ALOS-2/PALSAR-2 Level 1.1/1.5/2.1/3.1 CEOS SAR Product Format Description
            (`JAXA 2014 <https://www.eorc.jaxa.jp/ALOS-2/en/doc/fdata/PALSAR-2_xx_Format_CEOS_E_r.pdf>`_).
        Products / processing levels:
            * 1.0
            * 1.1
            * 1.5
        Acquisition modes:
            * SBS: Spotlight mode
            * UBS: Ultra-fine mode Single polarization
            * UBD: Ultra-fine mode Dual polarization
            * HBS: High-sensitive mode Single polarization
            * HBD: High-sensitive mode Dual polarization
            * HBQ: High-sensitive mode Full (Quad.) polarimetry
            * FBS: Fine mode Single polarization
            * FBD: Fine mode Dual polarization
            * FBQ: Fine mode Full (Quad.) polarimetry
            * WBS: Scan SAR nominal [14MHz] mode Single polarization
            * WBD: Scan SAR nominal [14MHz] mode Dual polarization
            * WWS: Scan SAR nominal [28MHz] mode Single polarization
            * WWD: Scan SAR nominal [28MHz] mode Dual polarization
            * VBS: Scan SAR wide mode Single polarization
            * VBD: Scan SAR wide mode Dual polarization
    """
    
    def __init__(self, scene):
        
        self.scene = os.path.realpath(scene)
        
        # try the file naming pattern of each sensor; re-raise if none matches
        candidates = [patterns.ceos_psr1, patterns.ceos_psr2]
        
        for i, pattern in enumerate(candidates):
            self.pattern = pattern
            try:
                self.examine()
                break
            except RuntimeError as e:
                if i + 1 == len(candidates):
                    raise e
        
        self.meta = self.scanMetadata()
        
        # register the standardized meta attributes as object attributes
        super(CEOS_PSR, self).__init__(self.meta)
    
    def _getLeaderfileContent(self):
        # read the full binary content of the leader (LED) file
        led_obj = self.getFileObj(self.led_filename)
        led = led_obj.read()
        led_obj.close()
        return led
    
    def _img_get_coordinates(self):
        # derive scene corner coordinates from the first and last signal data
        # records of an image (IMG) file
        img_filename = self.findfiles('IMG')[0]
        img_obj = self.getFileObj(img_filename)
        imageFileDescriptor = img_obj.read(720)
        
        lineRecordLength = int(imageFileDescriptor[186:192])  # bytes per line + 412
        numberOfRecords = int(imageFileDescriptor[180:186])
        
        signalDataDescriptor1 = img_obj.read(412)
        # seek to the last signal data record
        img_obj.seek(720 + lineRecordLength * (numberOfRecords - 1))
        signalDataDescriptor2 = img_obj.read()
        
        img_obj.close()
        
        lat = [signalDataDescriptor1[192:196], signalDataDescriptor1[200:204],
               signalDataDescriptor2[192:196], signalDataDescriptor2[200:204]]
        
        lon =
[signalDataDescriptor1[204:208], signalDataDescriptor1[212:216],
               signalDataDescriptor2[204:208], signalDataDescriptor2[212:216]]
        
        # signed big-endian integers, scaled by 1e6 (presumably microdegrees)
        lat = [struct.unpack('>i', x)[0] / 1000000. for x in lat]
        lon = [struct.unpack('>i', x)[0] / 1000000. for x in lon]
        
        return list(zip(lon, lat))
    
    def _parseSummary(self):
        # parse the 'summary.txt'/'workreport' key-value file into a dict
        try:
            summary_file = self.getFileObj(self.findfiles('summary|workreport')[0])
        except IndexError:
            return {}
        text = summary_file.getvalue().decode('utf-8').strip()
        summary_file.close()
        # rewrite the 'key="value"' lines into a Python dict literal and parse it
        summary = ast.literal_eval('{"' + re.sub(r'\s*=', '":', text).replace('\n', ',"') + '}')
        for x, y in summary.items():
            summary[x] = parse_literal(y)
        return summary
    
    @property
    def led_filename(self):
        return self.findfiles(self.pattern)[0]
    
    def scanMetadata(self):
        ################################################################################################################
        # read leader (LED) file
        led = self._getLeaderfileContent()
        
        # read summary text file
        meta = self._parseSummary()
        
        # read polarizations from image file names
        meta['polarizations'] = [re.search('[HV]{2}', os.path.basename(x)).group(0) for x in self.findfiles('^IMG-')]
        ################################################################################################################
        # read start and stop time
        
        try:
            meta['start'] = self.parse_date(meta['Img_SceneStartDateTime'])
            meta['stop'] = self.parse_date(meta['Img_SceneEndDateTime'])
        except (AttributeError, KeyError):
            # fall back to searching the leader file content directly
            # NOTE(review): 'led' appears to be bytes (read from a binary file
            # object; sibling slices are .decode()d below), so these str patterns
            # would raise TypeError if this path is reached — confirm
            try:
                start_string = re.search('Img_SceneStartDateTime[ ="0-9:.]*', led).group()
                stop_string = re.search('Img_SceneEndDateTime[ ="0-9:.]*', led).group()
                meta['start'] = self.parse_date(re.search(r'\d+\s[\d:.]+', start_string).group())
                meta['stop'] = self.parse_date(re.search(r'\d+\s[\d:.]+', stop_string).group())
            except AttributeError:
                raise IndexError('start and stop time stamps cannot be extracted; see file {}'
                                 .format(self.led_filename))
        ################################################################################################################
        # read file descriptor record
        p0 = 0
        p1 = struct.unpack('>i', led[8:12])[0]
        fileDescriptor = led[p0:p1]
        # number (n) and length (l) of the subsequent records:
        # dataSetSummary
        dss_n = int(fileDescriptor[180:186])
        dss_l = int(fileDescriptor[186:192])
        # mapProjectionData
        mpd_n = int(fileDescriptor[192:198])
        mpd_l = int(fileDescriptor[198:204])
        # platformPositionData
        ppd_n = int(fileDescriptor[204:210])
        ppd_l = int(fileDescriptor[210:216])
        # attitudeData
        adr_n = int(fileDescriptor[216:222])
        adr_l = int(fileDescriptor[222:228])
        # radiometricData
        rdr_n = int(fileDescriptor[228:234])
        rdr_l = int(fileDescriptor[234:240])
        # dataQualitySummary
        dqs_n = int(fileDescriptor[252:258])
        dqs_l = int(fileDescriptor[258:264])
        meta['sensor'] = {'AL1': 'PSR1', 'AL2': 'PSR2'}[fileDescriptor[48:51].decode('utf-8')]
        ################################################################################################################
        # read leader file name information
        
        match = re.match(re.compile(self.pattern), os.path.basename(self.led_filename))
        
        if meta['sensor'] == 'PSR1':
            meta['acquisition_mode'] = match.group('sub') + match.group('mode')
        else:
            meta['acquisition_mode'] = match.group('mode')
        meta['product'] = match.group('level')
        ################################################################################################################
        # read led records
        p0 = p1
        p1 += dss_l * dss_n
        dataSetSummary = led[p0:p1]
        
        if mpd_n > 0:
            p0 = p1
            p1 += mpd_l * mpd_n
            mapProjectionData = led[p0:p1]
        else:
            mapProjectionData = None
        
        p0 = p1
        p1 += ppd_l * ppd_n
        platformPositionData = led[p0:p1]
        
        p0 = p1
        p1 += adr_l * adr_n
        attitudeData = led[p0:p1]
        
        p0 = p1
        p1 += rdr_l * rdr_n
        radiometricData = led[p0:p1]
        
        p0 = p1
        p1 += dqs_l * dqs_n
        dataQualitySummary = led[p0:p1]
        
        # remaining records are facility related; each carries its length at offset 8
        facilityRelatedData = []
        while p1 < len(led):
            p0 = p1
            length = struct.unpack('>i', led[(p0 + 8):(p0 + 12)])[0]
            p1 += length
            facilityRelatedData.append(led[p0:p1])
        ################################################################################################################
        # read map projection data record
        
        if mapProjectionData is not None:
            lat = list(map(float, [mapProjectionData[1072:1088],
                                   mapProjectionData[1104:1120],
                                   mapProjectionData[1136:1152],
                                   mapProjectionData[1168:1184]]))
            lon = list(map(float, [mapProjectionData[1088:1104],
                                   mapProjectionData[1120:1136],
                                   mapProjectionData[1152:1168],
                                   mapProjectionData[1184:1200]]))
            meta['coordinates'] = list(zip(lon, lat))
            
            # https://github.com/datalyze-solutions/LandsatProcessingPlugin/blob/master/src/metageta/formats/alos.py
            
src_srs = osr.SpatialReference()\n            # src_srs.SetGeogCS('GRS 1980','GRS 1980','GRS 1980',6378137.00000,298.2572220972)\n            src_srs.SetWellKnownGeogCS('WGS84')\n            # Proj CS\n            projdesc = mapProjectionData[412:444].strip().decode('utf-8')\n            epsg = 0  # default\n            if projdesc == 'UTM-PROJECTION':\n                nZone = int(mapProjectionData[476:480])\n                dfFalseNorthing = float(mapProjectionData[496:512])\n                if dfFalseNorthing > 0.0:\n                    bNorth = False\n                    epsg = 32700 + nZone\n                else:\n                    bNorth = True\n                    epsg = 32600 + nZone\n                src_srs.ImportFromEPSG(epsg)\n                # src_srs.SetUTM(nZone,bNorth) #generates WKT that osr.SpatialReference.AutoIdentifyEPSG() doesn't return an EPSG for\n            elif projdesc == 'UPS-PROJECTION':\n                dfCenterLon = float(mapProjectionData[624:640])\n                dfCenterLat = float(mapProjectionData[640:656])\n                dfScale = float(mapProjectionData[656:672])\n                src_srs.SetPS(dfCenterLat, dfCenterLon, dfScale, 0.0, 0.0)\n            elif projdesc == 'MER-PROJECTION':\n                dfCenterLon = float(mapProjectionData[736:752])\n                dfCenterLat = float(mapProjectionData[752:768])\n                src_srs.SetMercator(dfCenterLat, dfCenterLon, 0, 0, 0)\n            elif projdesc == 'LCC-PROJECTION':\n                dfCenterLon = float(mapProjectionData[736:752])\n                dfCenterLat = float(mapProjectionData[752:768])\n                dfStdP1 = float(mapProjectionData[768:784])\n                dfStdP2 = float(mapProjectionData[784:800])\n                src_srs.SetLCC(dfStdP1, dfStdP2, dfCenterLat, dfCenterLon, 0, 0)\n            meta['projection'] = src_srs.ExportToWkt()\n        \n        else:\n            coordinates = self._img_get_coordinates()\n            if all([x == (0, 0) for x 
in coordinates]):\n                meta['projection'] = None\n            else:\n                meta['coordinates'] = coordinates\n                meta['projection'] = crsConvert(4326, 'wkt')\n        ################################################################################################################\n        # read data set summary record\n        \n        if meta['product'] == '1.5':\n            meta[\"heading_scene\"] = float(dataSetSummary[148:164])\n            meta[\"heading\"] = float(dataSetSummary[468:476])\n        else:\n            meta[\"heading_scene\"] = None\n            meta[\"heading\"] = None\n        \n        scene_id = dataSetSummary[20:52].decode('ascii')\n        \n        if meta['sensor'] == 'PSR1':\n            pattern = r'(?P<sat_id>[A-Z]{2})' \\\n                      r'(?P<sensor_id>[A-Z]{3})' \\\n                      r'(?P<sensor_id_sub>[A-Z]{1})' \\\n                      r'(?P<orbitNumber>[0-9]{5})' \\\n                      r'(?P<frameNumber>[0-9]{4})'\n        elif meta['sensor'] == 'PSR2':\n            pattern = r'(?P<sat_id>[A-Z0-9]{5})' \\\n                      r'(?P<orbitNumber>[0-9]{5})' \\\n                      r'(?P<frameNumber>[0-9]{4})-' \\\n                      r'(?P<obs_day>[0-9]{6})[ ]{11}'\n        else:\n            raise ValueError('sensor must be either PSR1 or PSR2; is: {}'.format(meta['sensor']))\n        \n        match = re.match(re.compile(pattern), scene_id)\n        \n        orbitsPerCycle = {'PSR1': 671, 'PSR2': 207}[meta['sensor']]\n        \n        meta['orbitNumber_abs'] = int(match.group('orbitNumber'))\n        meta['orbitNumber_rel'] = meta['orbitNumber_abs'] % orbitsPerCycle\n        meta['cycleNumber'] = meta['orbitNumber_abs'] // orbitsPerCycle + 1\n        meta['frameNumber'] = int(match.group('frameNumber'))\n        \n        try:\n            meta['lines'] = int(dataSetSummary[324:332]) * 2\n        except ValueError:\n            if 'Pdi_NoOfLines' in meta.keys():\n        
        meta['lines'] = meta['Pdi_NoOfLines']\n            else:\n                meta['lines'] = None\n        try:\n            meta['samples'] = int(dataSetSummary[332:340]) * 2\n        except ValueError:\n            if 'Pdi_NoOfPixels' in meta.keys():\n                meta['samples'] = meta['Pdi_NoOfPixels']\n            else:\n                meta['samples'] = None\n        meta['incidence'] = float(dataSetSummary[484:492])\n        meta['wavelength'] = float(dataSetSummary[500:516]) * 100  # in cm\n        meta['proc_facility'] = dataSetSummary[1046:1062].strip()\n        meta['proc_system'] = dataSetSummary[1062:1070].strip()\n        meta['proc_version'] = dataSetSummary[1070:1078].strip()\n        \n        try:\n            azlks = float(dataSetSummary[1174:1190])\n            rlks = float(dataSetSummary[1190:1206])\n            meta['looks'] = (rlks, azlks)\n        except ValueError:\n            meta['looks'] = (None, None)\n        \n        meta['orbit'] = dataSetSummary[1534:1542].decode('utf-8').strip()[0]\n        \n        try:\n            spacing_azimuth = float(dataSetSummary[1686:1702])\n            spacing_range = float(dataSetSummary[1702:1718])\n            meta['spacing'] = (spacing_range, spacing_azimuth)\n        except ValueError:\n            meta['spacing'] = (None, None)\n        ################################################################################################################\n        # read radiometric data record\n        if len(radiometricData) > 0:\n            meta['k_dB'] = float(radiometricData[20:36])\n        else:\n            meta['k_dB'] = None\n        ################################################################################################################\n        # additional notes\n        \n        # the following can be used to read platform position time from the led file\n        # this covers a larger time frame than the actual scene sensing time\n        # y, m, d, nd, s = 
platformPositionData[144:182].split()\n        # start = datetime(int(y), int(m), int(d)) + timedelta(seconds=float(s))\n        # npoints = int(platformPositionData[140:144])\n        # interval = float(platformPositionData[182:204])\n        # stop = start + timedelta(seconds=(npoints - 1) * interval)\n        # parse_date(start)\n        # parse_date(stop)\n        \n        return meta\n    \n    def unpack(self, directory, overwrite=False, exist_ok=False):\n        outdir = os.path.join(directory, os.path.basename(self.file).replace('LED-', ''))\n        self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)\n\n\nclass EORC_PSR(ID):\n    \"\"\"\n    Handler class for ALOS-2/PALSAR-2 data in EORC (Earth Observation Research Center) Path format\n    \n    Sensors:\n        * PALSAR-2\n\n    PALSAR-2:\n        Reference: \n            NDX-150019: ALOS-2/PALSAR-2 EORC Path Product Format Description (JAXA 2016)\n        Products / processing levels:\n            * 1.5\n        Acquisition modes:\n            * FBD: Fine mode Dual polarization\n            * WBD: Scan SAR nominal [14MHz] mode Dual polarization\n    \"\"\"\n    \n    def __init__(self, scene):\n        \n        self.scene = os.path.realpath(scene)\n        \n        self.pattern = patterns.eorc_psr\n        \n        self.examine()\n        \n        self.meta = self.scanMetadata()\n        \n        # register the standardized meta attributes as object attributes\n        super(EORC_PSR, self).__init__(self.meta)\n    \n    def _getHeaderfileContent(self):\n        head_obj = self.getFileObj(self.header_filename)\n        head = head_obj.read().decode('utf-8')\n        head = list(head.split('\\n'))\n        head_obj.close()\n        return head\n    \n    def _img_get_coordinates(self):\n        img_filename = self.findfiles('IMG')[0]\n        img_obj = self.getFileObj(img_filename)\n        imageFileDescriptor = img_obj.read(720)\n        \n        lineRecordLength = 
int(imageFileDescriptor[186:192])  # bytes per line + 412\n        numberOfRecords = int(imageFileDescriptor[180:186])\n        \n        signalDataDescriptor1 = img_obj.read(412)\n        img_obj.seek(720 + lineRecordLength * (numberOfRecords - 1))\n        signalDataDescriptor2 = img_obj.read()\n        \n        img_obj.close()\n        \n        lat = [signalDataDescriptor1[192:196], signalDataDescriptor1[200:204],\n               signalDataDescriptor2[192:196], signalDataDescriptor2[200:204]]\n        \n        lon = [signalDataDescriptor1[204:208], signalDataDescriptor1[212:216],\n               signalDataDescriptor2[204:208], signalDataDescriptor2[212:216]]\n        \n        lat = [struct.unpack('>i', x)[0] / 1000000. for x in lat]\n        lon = [struct.unpack('>i', x)[0] / 1000000. for x in lon]\n        \n        return list(zip(lon, lat))\n    \n    def _parseFacter_m(self):\n        try:\n            facter_file = self.findfiles('facter_m.dat')[0]\n        except IndexError:\n            return {}\n        facter_obj = self.getFileObj(facter_file)\n        facter_m = facter_obj.read().decode('utf-8')\n        facter_m = list(facter_m.split('\\n'))\n        facter_obj.close()\n        return facter_m\n    \n    @property\n    def header_filename(self):\n        return self.findfiles(self.pattern)[0]\n    \n    def scanMetadata(self):\n        ################################################################################################################\n        # read header (HDR) file\n        header = self._getHeaderfileContent()\n        header = [head.replace(\" \", \"\") for head in header]\n        \n        # read summary text file\n        facter_m = self._parseFacter_m()\n        facter_m = [fact.replace(\" \", \"\") for fact in facter_m]\n        \n        meta = {}\n        \n        # read polarizations from image file names\n        meta['polarizations'] = [re.search('[HV]{2}', os.path.basename(x)).group(0) for x in 
self.findfiles('^sar.')]\n        meta['product'] = header[3]\n        ################################################################################################################\n        # read start and stop time --> TODO: in what format is the start and stop time?\n        \n        try:\n            start_time = facter_m[168].split('.')[0].zfill(2) + facter_m[168].split('.')[1][:4]\n            stop_time = facter_m[170].split('.')[0].zfill(2) + facter_m[170].split('.')[1][:4]\n        except (AttributeError):\n            raise IndexError('start and stop time stamps cannot be extracted; see file facter_m.dat')\n        \n        meta['start'] = str(header[6])  # +'T'+start_time\n        meta['stop'] = str(header[6])  # +'T'+stop_time\n        ################################################################################################################\n        # read file metadata\n        meta['sensor'] = header[2]\n        ################################################################################################################\n        # read leader file name information\n        meta['acquisition_mode'] = header[12]\n        # ##############################################################################################################\n        # read map projection data \n        \n        lat = list(map(float, [header[33], header[35], header[37], header[39]]))\n        lon = list(map(float, [header[34], header[36], header[38], header[40]]))\n        \n        if len(lat) == 0 or len(lon) == 0:\n            meta['coordinates'] = self._img_get_coordinates()\n        else:\n            meta['coordinates'] = list(zip(lon, lat))\n        \n        meta['projection'] = crsConvert(4918, 'wkt')  # EPSG: 4918: ITRF97, GRS80\n        ################################################################################################################\n        # read data set summary record\n        \n        orbitsPerCycle = int(207)\n        \n        
meta['orbitNumber_rel'] = int(header[7])\n        meta['cycleNumber'] = int(header[5])\n        meta['frameNumber'] = ''\n        meta['orbitNumber_abs'] = int(orbitsPerCycle * (meta['cycleNumber'] - 1) + meta['orbitNumber_rel'])\n        \n        meta['lines'] = int(float(facter_m[51]))\n        meta['samples'] = int(float(facter_m[50]))\n        meta['incidence'] = float(facter_m[119])\n        meta['proc_facility'] = header[73]\n        \n        meta['spacing'] = (float(header[51]), float(header[52]))\n        \n        meta['orbit'] = header[9]\n        ################################################################################################################\n        # read radiometric data record\n        \n        meta['k_dB'] = float(header[64])\n        \n        return meta\n    \n    def unpack(self, directory, overwrite=False, exist_ok=False):\n        outdir = os.path.join(directory, os.path.basename(self.file).replace('LED-', ''))\n        self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)\n\n\nclass ESA(ID):\n    \"\"\"\n    Handler class for SAR data in ESA format (Envisat ASAR, ERS-1/2)\n    \n    Sensors:\n        * ASAR\n        * ERS1\n        * ERS2\n    \"\"\"\n    \n    def __init__(self, scene):\n        \n        self.pattern = patterns.esa\n        self.pattern_pid = r'(?P<sat_id>(?:SAR|ASA))_' \\\n                           r'(?P<image_mode>(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_)))_' \\\n                           r'(?P<processing_level>[012B][CP])'\n        \n        self.scene = os.path.realpath(scene)\n        \n        if re.search('.[EN][12]$', self.scene):\n            self.file = self.scene\n        else:\n            self.examine()\n        \n        self.meta = self.scanMetadata()\n        \n        # register the standardized meta attributes as object attributes\n        super(ESA, self).__init__(self.meta)\n    \n    def scanMetadata(self):\n        match = re.match(re.compile(self.pattern), 
os.path.basename(self.file))\n        match2 = re.match(re.compile(self.pattern_pid), match.group('product_id'))\n        \n        if re.search('IM__0', match.group('product_id')):\n            raise RuntimeError('product level 0 not supported (yet)')\n        \n        meta = dict()\n        sensor_lookup = {'N1': 'ASAR', 'E1': 'ERS1', 'E2': 'ERS2'}\n        meta['sensor'] = sensor_lookup[match.group('satellite_ID')]\n        meta['acquisition_mode'] = match2.group('image_mode')\n        \n        meta['image_geometry'] = 'GROUND_RANGE'\n        # product overview table: https://doi.org/10.5167/UZH-96146\n        if meta['acquisition_mode'] in ['APS', 'IMS', 'WSS']:\n            meta['product'] = 'SLC'\n            meta['image_geometry'] = 'SLANT_RANGE'\n        elif meta['acquisition_mode'] in ['APP', 'IMP']:\n            meta['product'] = 'PRI'\n        elif meta['acquisition_mode'] in ['APM', 'IMM', 'WSM']:\n            meta['product'] = 'MR'\n        else:\n            raise RuntimeError(f\"unsupported acquisition mode: '{meta['acquisition_mode']}'\")\n        \n        def val_convert(val):\n            try:\n                out = int(val)\n            except ValueError:\n                try:\n                    out = float(val)\n                except ValueError:\n                    if re.search('[0-9]{2}-[A-Z]{3}-[0-9]{2}', val):\n                        out = dateparse(val)\n                        out = out.replace(tzinfo=timezone.utc)\n                    else:\n                        out = val\n            return out\n        \n        def decode(raw):\n            pattern = r'(?P<key>[A-Z0-9_]+)\\=(\")?(?P<value>.*?)(\"|<|$)'\n            out = {}\n            coord_keys = [f'{x}_{y}_{z}'\n                          for x in ['FIRST', 'LAST']\n                          for y in ['NEAR', 'MID', 'FAR']\n                          for z in ['LAT', 'LONG']]\n            lines = raw.split('\\n')\n            for line in lines:\n                match = 
re.match(pattern, line)\n                if match:\n                    matchdict = match.groupdict()\n                    val = val_convert(str(matchdict['value']).strip())\n                    if matchdict['key'] in coord_keys:\n                        val *= 10 ** -6\n                    out[matchdict['key']] = val\n            return out\n        \n        with self.getFileObj(self.file) as obj:\n            origin = {}\n            mph = obj.read(1247).decode('ascii')\n            origin['MPH'] = decode(mph)\n            \n            sph_size = origin['MPH']['SPH_SIZE']\n            dsd_size = origin['MPH']['DSD_SIZE']\n            dsd_num = origin['MPH']['NUM_DSD']\n            sph_descr_size = sph_size - dsd_size * dsd_num\n            \n            sph = obj.read(sph_descr_size).decode('ascii')\n            origin['SPH'] = decode(sph)\n            \n            datasets = {}\n            for i in range(dsd_num):\n                dsd = obj.read(dsd_size).decode('ascii')\n                dataset = decode(dsd)\n                datasets[dataset.pop('DS_NAME')] = dataset\n            origin['DSD'] = datasets\n            \n            meta['origin'] = origin\n            \n            key = 'GEOLOCATION GRID ADS'\n            ds_offset = origin['DSD'][key]['DS_OFFSET']\n            ds_size = origin['DSD'][key]['DS_SIZE']\n            dsr_size = origin['DSD'][key]['DSR_SIZE']\n            obj.seek(ds_offset)\n            geo = obj.read(ds_size)\n        \n        geo = [geo[i:i + dsr_size] for i in range(0, len(geo), dsr_size)]\n        \n        keys = ['first_zero_doppler_time', 'attach_flag', 'line_num',\n                'num_lines', 'sub_sat_track', 'first_line_tie_points',\n                'spare', 'last_zero_doppler_time', 'last_line_tie_points',\n                'swath_number']\n        lengths = [12, 1, 4, 4, 4, 220, 22, 12, 220, 3, 19]\n        \n        meta['origin']['GEOLOCATION_GRID_ADS'] = []\n        for granule in geo:\n            start = 0\n    
        values = {}\n            for i, key in enumerate(keys):\n                value = granule[start:sum(lengths[:i + 1])]\n                if key in ['first_zero_doppler_time', 'last_zero_doppler_time']:\n                    unpack = dict(zip(('days', 'seconds', 'microseconds'),\n                                      struct.unpack('>lLL', value)))\n                    value = datetime(year=2000, month=1, day=1, tzinfo=timezone.utc)\n                    value += timedelta(**unpack)\n                elif key in ['attach_flag']:\n                    value = struct.unpack('B', value)[0]\n                elif key in ['line_num', 'num_lines']:\n                    value = struct.unpack('>L', value)[0]\n                elif key in ['sub_sat_track']:\n                    value = struct.unpack('>f', value)[0]\n                elif key in ['first_line_tie_points', 'last_line_tie_points']:\n                    sample_numbers = struct.unpack('>' + 'L' * 11, value[0:44])\n                    slant_range_times = struct.unpack('>' + 'f' * 11, value[44:88])\n                    incident_angles = struct.unpack('>' + 'f' * 11, value[88:132])\n                    latitudes = struct.unpack('>' + 'l' * 11, value[132:176])\n                    latitudes = [x / 1000000. for x in latitudes]\n                    longitudes = struct.unpack('>' + 'l' * 11, value[176:220])\n                    longitudes = [x / 1000000. 
for x in longitudes]\n                    value = []\n                    for j in range(11):\n                        value.append({'sample_number': sample_numbers[j],\n                                      'slant_range_time': slant_range_times[j],\n                                      'incident_angle': incident_angles[j],\n                                      'latitude': latitudes[j],\n                                      'longitude': longitudes[j]})\n                elif key == 'swath_number':\n                    value = value.decode('ascii').strip()\n                if key != 'spare':\n                    values[key] = value\n                start += lengths[i]\n            meta['origin']['GEOLOCATION_GRID_ADS'].append(values)\n        \n        lat = []\n        lon = []\n        for granule in meta['origin']['GEOLOCATION_GRID_ADS']:\n            for group in ['first', 'last']:\n                for i in range(11):\n                    lat.append(granule[f'{group}_line_tie_points'][i]['latitude'])\n                    lon.append(granule[f'{group}_line_tie_points'][i]['longitude'])\n        \n        meta['coordinates'] = list(zip(lon, lat))\n        \n        if meta['sensor'] == 'ASAR':\n            pols = [y for x, y in origin['SPH'].items() if 'TX_RX_POLAR' in x]\n            pols = [x.replace('/', '') for x in pols if len(x) == 3]\n            meta['polarizations'] = sorted(pols)\n        elif meta['sensor'] in ['ERS1', 'ERS2']:\n            meta['polarizations'] = ['VV']\n        \n        meta['orbit'] = origin['SPH']['PASS'][0]\n        meta['start'] = origin['MPH']['SENSING_START'].strftime('%Y%m%dT%H%M%S')\n        meta['stop'] = origin['MPH']['SENSING_STOP'].strftime('%Y%m%dT%H%M%S')\n        meta['spacing'] = (origin['SPH']['RANGE_SPACING'], origin['SPH']['AZIMUTH_SPACING'])\n        meta['looks'] = (origin['SPH']['RANGE_LOOKS'], origin['SPH']['AZIMUTH_LOOKS'])\n        meta['samples'] = origin['SPH']['LINE_LENGTH']\n        meta['lines'] = 
origin['DSD']['MDS1']['NUM_DSR']\n        \n        meta['orbitNumber_abs'] = origin['MPH']['ABS_ORBIT']\n        meta['orbitNumber_rel'] = origin['MPH']['REL_ORBIT']\n        meta['cycleNumber'] = origin['MPH']['CYCLE']\n        meta['frameNumber'] = origin['MPH']['ABS_ORBIT']\n        \n        incident_angles = []\n        for item in meta['origin']['GEOLOCATION_GRID_ADS']:\n            for key in ['first', 'last']:\n                pts = item[f'{key}_line_tie_points']\n                for pt in pts:\n                    incident_angles.append(pt['incident_angle'])\n        \n        meta['incidence_nr'] = min(incident_angles)\n        meta['incidence_fr'] = max(incident_angles)\n        meta['incidence'] = (meta['incidence_nr'] + meta['incidence_fr']) / 2\n        \n        resolution_rg, resolution_az, nesz_nr, nesz_fr = \\\n            get_resolution_nesz(sensor=meta['sensor'], mode=meta['acquisition_mode'],\n                                swath_id=origin['SPH']['SWATH'], date=meta['start'])\n        \n        meta['resolution'] = (resolution_rg, resolution_az)\n        meta['nesz'] = (nesz_nr, nesz_fr)\n        \n        meta['projection'] = crsConvert(4326, 'wkt')\n        \n        return meta\n    \n    def geo_grid(self, outname=None, driver=None, overwrite=True):\n        \"\"\"\n        get the geo grid as vector geometry\n\n        Parameters\n        ----------\n        outname: str\n            the name of the vector file to be written\n        driver: str\n            the output file format; needs to be defined if the format cannot\n            be auto-detected from the filename extension\n        overwrite: bool\n            overwrite an existing vector file?\n\n        Returns\n        -------\n        spatialist.vector.Vector or None\n            the vector object if `outname` is None, None otherwise\n\n        See also\n        --------\n        spatialist.vector.Vector.write\n        \"\"\"\n        vec = Vector(driver='MEM')\n        
vec.addlayer('geogrid', 4326, ogr.wkbPoint)\n        field_defs = [\n            (\"swath\", ogr.OFTString),\n            (\"azimuthTime\", ogr.OFTDateTime),\n            (\"slantRangeTime\", ogr.OFTReal),\n            (\"line\", ogr.OFTInteger),\n            (\"pixel\", ogr.OFTInteger),\n            (\"incidenceAngle\", ogr.OFTReal)\n        ]\n        for name, ftype in field_defs:\n            field = ogr.FieldDefn(name, ftype)\n            vec.layer.CreateField(field)\n        \n        for granule in self.meta['origin']['GEOLOCATION_GRID_ADS']:\n            line_first = granule['line_num']\n            line_last = granule['line_num'] + granule['num_lines'] - 1\n            for group in ['first', 'last']:\n                meta = {'swath': granule['swath_number'],\n                        'azimuthTime': granule[f'{group}_zero_doppler_time'],\n                        'line': line_first if group == 'first' else line_last}\n                tp = granule[f'{group}_line_tie_points']\n                for i in range(11):\n                    x = tp[i]['longitude']\n                    y = tp[i]['latitude']\n                    geom = ogr.Geometry(ogr.wkbPoint)\n                    geom.AddPoint(x, y)\n                    geom.FlattenTo2D()\n                    meta['slantRangeTime'] = tp[i]['slant_range_time']\n                    meta['pixel'] = tp[i]['sample_number']\n                    meta['incidenceAngle'] = tp[i]['incident_angle']\n                    vec.addfeature(geom, fields=meta)\n        geom = None\n        if outname is None:\n            return vec\n        else:\n            vec.write(outfile=outname, driver=driver, overwrite=overwrite)\n    \n    def unpack(self, directory, overwrite=False, exist_ok=False):\n        base_file = os.path.basename(self.file).strip(r'\\.zip|\\.tar(?:\\.gz|)')\n        base_dir = os.path.basename(directory.strip('/'))\n        \n        outdir = directory if base_file == base_dir else os.path.join(directory, base_file)\n    
    \n        self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)\n\n\nclass SAFE(ID):\n    \"\"\"\n    Handler class for Sentinel-1 data\n    \n    Sensors:\n        * S1A\n        * S1B\n        * S1C\n        * S1D\n\n    References:\n        * S1-RS-MDA-52-7443 Sentinel-1 IPF Auxiliary Product Specification\n        * MPC-0243 Masking \"No-value\" Pixels on GRD Products generated by the Sentinel-1 ESA IPF\n    \"\"\"\n    \n    def __init__(self, scene):\n        \n        self.scene = os.path.realpath(scene)\n        \n        self.pattern = patterns.safe\n        \n        self.pattern_ds = r'^s1[abcd]-' \\\n                          r'(?P<swath>s[1-6]|iw[1-3]?|ew[1-5]?|wv[1-2]|n[1-6])-' \\\n                          r'(?P<product>slc|grd|ocn)-' \\\n                          r'(?P<pol>hh|hv|vv|vh)-' \\\n                          r'(?P<start>[0-9]{8}t[0-9]{6})-' \\\n                          r'(?P<stop>[0-9]{8}t[0-9]{6})-' \\\n                          r'(?:[0-9]{6})-(?:[0-9a-f]{6})-' \\\n                          r'(?P<id>[0-9]{3})' \\\n                          r'\\.xml$'\n        \n        self.examine(include_folders=True)\n        \n        if not re.match(re.compile(self.pattern), os.path.basename(self.file)):\n            raise RuntimeError('folder does not match S1 scene naming convention')\n        \n        # scan the metadata XML file and add selected attributes to a meta dictionary\n        self.meta = self.scanMetadata()\n        self.meta['projection'] = crsConvert(4326, 'wkt')\n        \n        # register the standardized meta attributes as object attributes\n        super(SAFE, self).__init__(self.meta)\n        \n        self.gammafiles = {'slc': [], 'pri': [], 'grd': []}\n    \n    def removeGRDBorderNoise(self, method='pyroSAR'):\n        \"\"\"\n        mask out Sentinel-1 image border noise.\n        \n        Parameters\n        ----------\n        method: str\n            the border noise removal method to be applied; one of 
the following:\n            \n             - 'ESA': the pure implementation as described by ESA\n             - 'pyroSAR': the ESA method plus the custom pyroSAR refinement\n\n        Returns\n        -------\n        \n        See Also\n        --------\n        :func:`~pyroSAR.S1.removeGRDBorderNoise`\n        \"\"\"\n        S1.removeGRDBorderNoise(self, method=method)\n    \n    def geo_grid(self, outname=None, driver=None, overwrite=True):\n        \"\"\"\n        get the geo grid as vector geometry\n\n        Parameters\n        ----------\n        outname: str\n            the name of the vector file to be written\n        driver: str\n            the output file format; needs to be defined if the format cannot\n            be auto-detected from the filename extension\n        overwrite: bool\n            overwrite an existing vector file?\n\n        Returns\n        -------\n        ~spatialist.vector.Vector or None\n            the vector object if `outname` is None, None otherwise\n        \n        See also\n        --------\n        spatialist.vector.Vector.write\n        \"\"\"\n        annotations = self.findfiles(self.pattern_ds)\n        key = lambda x: re.search('-[vh]{2}-', x).group()\n        groups = groupby(sorted(annotations, key=key), key=key)\n        annotations = [list(value) for key, value in groups][0]\n        \n        vec = Vector(driver='MEM')\n        vec.addlayer('geogrid', 4326, ogr.wkbPoint25D)\n        field_defs = [\n            (\"swath\", ogr.OFTString),\n            (\"azimuthTime\", ogr.OFTDateTime),\n            (\"slantRangeTime\", ogr.OFTReal),\n            (\"line\", ogr.OFTInteger),\n            (\"pixel\", ogr.OFTInteger),\n            (\"incidenceAngle\", ogr.OFTReal),\n            (\"elevationAngle\", ogr.OFTReal),\n        ]\n        for name, ftype in field_defs:\n            field = ogr.FieldDefn(name, ftype)\n            vec.layer.CreateField(field)\n        \n        for ann in annotations:\n            with 
self.getFileObj(ann) as ann_xml:\n                tree = ET.fromstring(ann_xml.read())\n            swath = tree.find(\".//adsHeader/swath\").text\n            points = tree.findall(\".//geolocationGridPoint\")\n            for point in points:\n                meta = {child.tag: child.text for child in point}\n                meta[\"swath\"] = swath\n                x = float(meta.pop(\"longitude\"))\n                y = float(meta.pop(\"latitude\"))\n                z = float(meta.pop(\"height\"))\n                geom = ogr.Geometry(ogr.wkbPoint25D)\n                geom.AddPoint(x, y, z)\n                az_time = dateparse(meta[\"azimuthTime\"])\n                meta[\"azimuthTime\"] = az_time.replace(tzinfo=timezone.utc)\n                for key in [\"slantRangeTime\", \"incidenceAngle\", \"elevationAngle\"]:\n                    meta[key] = float(meta[key])\n                for key in [\"line\", \"pixel\"]:\n                    meta[key] = int(meta[key])\n                vec.addfeature(geom, fields=meta)\n        geom = None\n        if outname is None:\n            return vec\n        else:\n            vec.write(outfile=outname, driver=driver, overwrite=overwrite)\n    \n    def getOSV(self, osvdir=None, osvType='POE', returnMatch=False, useLocal=True, timeout=300, url_option=1):\n        \"\"\"\n        download Orbit State Vector files for the scene\n\n        Parameters\n        ----------\n        osvdir: str\n            the directory of OSV files; subdirectories POEORB and RESORB are created automatically;\n            if no directory is defined, the standard SNAP auxdata location is used\n        osvType: str or list[str]\n            the type of orbit file either 'POE', 'RES' or a list of both;\n            if both are selected, the best matching file will be retrieved. 
I.e., POE if available and RES otherwise\n        returnMatch: bool\n            return the best matching orbit file?\n        useLocal: bool\n            use locally existing files and do not search for files online if the right file has been found?\n        timeout: int or tuple or None\n            the timeout in seconds for downloading OSV files as provided to :func:`requests.get`\n        url_option: int\n            the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch` for options\n\n        Returns\n        -------\n        str or None\n            the best matching OSV file if `returnMatch` is True or None otherwise\n        \n        See Also\n        --------\n        :class:`pyroSAR.S1.OSV`\n        \"\"\"\n        with S1.OSV(osvdir, timeout=timeout) as osv:\n            if useLocal:\n                match = osv.match(sensor=self.sensor, timestamp=self.start,\n                                  osvtype=osvType)\n                if match is not None:\n                    return match if returnMatch else None\n            \n            if osvType in ['POE', 'RES']:\n                files = osv.catch(sensor=self.sensor, osvtype=osvType,\n                                  start=self.start, stop=self.stop,\n                                  url_option=url_option)\n            elif sorted(osvType) == ['POE', 'RES']:\n                files = osv.catch(sensor=self.sensor, osvtype='POE',\n                                  start=self.start, stop=self.stop,\n                                  url_option=url_option)\n                if len(files) == 0:\n                    files = osv.catch(sensor=self.sensor, osvtype='RES',\n                                      start=self.start, stop=self.stop,\n                                      url_option=url_option)\n            else:\n                msg = \"osvType must either be 'POE', 'RES' or a list of both\"\n                raise TypeError(msg)\n            \n            osv.retrieve(files)\n            \n      
      if returnMatch:\n                match = osv.match(sensor=self.sensor, timestamp=self.start,\n                                  osvtype=osvType)\n                return match\n    \n    def quicklook(self, outname, format='kmz', na_transparent=True):\n        \"\"\"\n        Write a quicklook file for the scene.\n        \n        Parameters\n        ----------\n        outname: str\n            the file to write\n        format: str\n            the quicklook format. Currently supported options:\n            \n             - kmz\n        na_transparent: bool\n            make NA values transparent?\n\n        Returns\n        -------\n\n        \"\"\"\n        if self.product not in ['GRD', 'SLC']:\n            msg = 'this method has only been implemented for GRD and SLC, not {}'\n            raise RuntimeError(msg.format(self.product))\n        \n        if format != 'kmz':\n            raise RuntimeError('currently only kmz is supported as format')\n        kml_name = self.findfiles('map-overlay.kml')[0]\n        png_name = self.findfiles('quick-look.png')[0]\n        with zf.ZipFile(outname, 'w') as out:\n            with self.getFileObj(kml_name) as kml_in:\n                kml = kml_in.getvalue().decode('utf-8')\n                kml = kml.replace('Sentinel-1 Map Overlay', self.outname_base())\n                out.writestr('doc.kml', data=kml)\n            with self.getFileObj(png_name) as png_in:\n                if na_transparent:\n                    img = Image.open(png_in)\n                    img = img.convert('RGBA')\n                    datas = img.getdata()\n                    newData = []\n                    for item in datas:\n                        if item[0] == 0 and item[1] == 0 and item[2] == 0:\n                            newData.append((0, 0, 0, 0))\n                        else:\n                            newData.append(item)\n                    img.putdata(newData)\n                    buf = BytesIO()\n                    
img.save(buf, format='png')\n                    out.writestr('quick-look.png', buf.getvalue())\n                else:\n                    out.writestr('quick-look.png', data=png_in.getvalue())\n    \n    def resolution(self):\n        \"\"\"\n        Compute the mid-swath resolution of the Sentinel-1 product. For GRD products the resolution is expressed in\n        ground range and in slant range otherwise.\n        \n        References:\n            * https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/resolutions/level-1-single-look-complex\n            * https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/resolutions/level-1-ground-range-detected\n            * https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/document-library/-/asset_publisher/1dO7RF5fJMbd/content/sentinel-1-product-definition\n        \n        Returns\n        -------\n        tuple[float]\n            the resolution as (range, azimuth)\n        \"\"\"\n        if 'resolution' in self.meta.keys():\n            return self.meta['resolution']\n        if self.product not in ['GRD', 'SLC']:\n            msg = 'this method has only been implemented for GRD and SLC, not {}'\n            raise RuntimeError(msg.format(self.product))\n        \n        annotations = self.findfiles(self.pattern_ds)\n        key = lambda x: re.search('-[vh]{2}-', x).group()\n        groups = groupby(sorted(annotations, key=key), key=key)\n        annotations = [list(value) for key, value in groups][0]\n        proc_pars = []  # processing parameters per sub-swath\n        sp_az = []  # azimuth pixel spacings per sub-swath\n        ti_az = []  # azimuth time intervals per sub-swath\n        for ann in annotations:\n            with self.getFileObj(ann) as ann_xml:\n                tree = ET.fromstring(ann_xml.read())\n                par = tree.findall('.//swathProcParams')\n                proc_pars.extend(par)\n                for i in range(len(par)):\n                    
sp_az.append(float(tree.find('.//azimuthPixelSpacing').text))\n                    ti_az.append(float(tree.find('.//azimuthTimeInterval').text))\n        c = 299792458.0  # speed of light\n        # see Sentinel-1 product definition for Hamming window coefficients\n        # and Impulse Response Width (IRW) broadening factors:\n        coefficients = [0.52, 0.6, 0.61, 0.62, 0.63, 0.65, 0.70, 0.72, 0.73, 0.75]\n        b_factors = [1.54, 1.32, 1.3, 1.28, 1.27, 1.24, 1.18, 1.16, 1.15, 1.13]\n        resolutions_rg = []\n        resolutions_az = []\n        for i, par in enumerate(proc_pars):\n            # computation of slant range resolution\n            rg_proc = par.find('rangeProcessing')\n            wrg = float(rg_proc.find('windowCoefficient').text)\n            brg = float(rg_proc.find('processingBandwidth').text)\n            lbrg = float(rg_proc.find('lookBandwidth').text)\n            lrg = brg / lbrg\n            kbrg = b_factors[coefficients.index(wrg)]\n            resolutions_rg.append(0.886 * c / (2 * brg) * kbrg * lrg)\n            \n            # computation of azimuth resolution; yet to be checked for correctness\n            az_proc = par.find('azimuthProcessing')\n            waz = float(az_proc.find('windowCoefficient').text)\n            baz = float(az_proc.find('processingBandwidth').text)\n            lbaz = float(az_proc.find('lookBandwidth').text)\n            laz = baz / lbaz\n            kbaz = b_factors[coefficients.index(waz)]\n            vsat = sp_az[i] / ti_az[i]\n            resolutions_az.append(0.886 * vsat / baz * kbaz * laz)\n        \n        resolution_rg = median(resolutions_rg)\n        resolution_az = median(resolutions_az)\n        \n        if self.meta['image_geometry'] == 'GROUND_RANGE':\n            resolution_rg /= math.sin(math.radians(self.meta['incidence']))\n        \n        self.meta['resolution'] = resolution_rg, resolution_az\n        return self.meta['resolution']\n    \n    def scanMetadata(self):\n        
with self.getFileObj(self.findfiles('manifest.safe')[0]) as input:\n            manifest = input.getvalue()\n        namespaces = getNamespaces(manifest)\n        tree = ET.fromstring(manifest)\n        \n        meta = dict()\n        key = 's1sarl1'\n        obj_prod = tree.find('.//{}:productType'.format(key), namespaces)\n        if obj_prod == None:\n            key = 's1sarl2'\n            obj_prod = tree.find('.//{}:productType'.format(key), namespaces)\n        \n        meta['product'] = obj_prod.text\n        \n        acqmode = tree.find('.//{}:mode'.format(key), namespaces).text\n        if acqmode == 'SM':\n            meta['acquisition_mode'] = tree.find('.//{}:swath'.format(key), namespaces).text\n        else:\n            meta['acquisition_mode'] = acqmode\n        meta['acquisition_time'] = dict(\n            [(x, tree.find('.//safe:{}Time'.format(x), namespaces).text) for x in ['start', 'stop']])\n        meta['start'], meta['stop'] = (self.parse_date(meta['acquisition_time'][x]) for x in ['start', 'stop'])\n        meta['coordinates'] = [tuple([float(y) for y in x.split(',')][::-1]) for x in\n                               tree.find('.//gml:coordinates', namespaces).text.split()]\n        meta['orbit'] = tree.find('.//s1:pass', namespaces).text[0]\n        \n        meta['orbitNumber_abs'] = int(tree.find('.//safe:orbitNumber[@type=\"start\"]', namespaces).text)\n        meta['orbitNumber_rel'] = int(tree.find('.//safe:relativeOrbitNumber[@type=\"start\"]', namespaces).text)\n        meta['cycleNumber'] = int(tree.find('.//safe:cycleNumber', namespaces).text)\n        meta['frameNumber'] = int(tree.find('.//{}:missionDataTakeID'.format(key), namespaces).text)\n        \n        meta['orbitNumbers_abs'] = dict(\n            [(x, int(tree.find('.//safe:orbitNumber[@type=\"{0}\"]'.format(x), namespaces).text)) for x in\n             ['start', 'stop']])\n        meta['orbitNumbers_rel'] = dict(\n            [(x, 
int(tree.find('.//safe:relativeOrbitNumber[@type=\"{0}\"]'.format(x), namespaces).text)) for x in\n             ['start', 'stop']])\n        key_pol = './/{}:transmitterReceiverPolarisation'.format(key)\n        meta['polarizations'] = [x.text for x in tree.findall(key_pol, namespaces)]\n        meta['category'] = tree.find('.//{}:productClass'.format(key), namespaces).text\n        family = tree.find('.//safe:familyName', namespaces).text.replace('ENTINEL-', '')\n        number = tree.find('.//safe:number', namespaces).text\n        meta['sensor'] = family + number\n        meta['IPF_version'] = float(tree.find('.//safe:software', namespaces).attrib['version'])\n        sliced = tree.find('.//{}:sliceProductFlag'.format(key), namespaces).text == 'true'\n        if sliced:\n            meta['sliceNumber'] = int(tree.find('.//{}:sliceNumber'.format(key), namespaces).text)\n            meta['totalSlices'] = int(tree.find('.//{}:totalSlices'.format(key), namespaces).text)\n        else:\n            meta['sliceNumber'] = None\n            meta['totalSlices'] = None\n        \n        if meta['product'] == 'OCN':\n            meta['spacing'] = -1\n            meta['samples'] = -1\n            meta['lines'] = -1\n        else:\n            annotations = self.findfiles(self.pattern_ds)\n            key = lambda x: re.search('-[vh]{2}-', x).group()\n            groups = groupby(sorted(annotations, key=key), key=key)\n            annotations = [list(value) for key, value in groups][0]\n            ann_trees = []\n            for ann in annotations:\n                with self.getFileObj(ann) as ann_xml:\n                    ann_trees.append(ET.fromstring(ann_xml.read()))\n            \n            sp_rg = [float(x.find('.//rangePixelSpacing').text) for x in ann_trees]\n            sp_az = [float(x.find('.//azimuthPixelSpacing').text) for x in ann_trees]\n            meta['spacing'] = (median(sp_rg), median(sp_az))\n            \n            looks_rg = 
[float(x.find('.//rangeProcessing/numberOfLooks').text) for x in ann_trees]\n            looks_az = [float(x.find('.//azimuthProcessing/numberOfLooks').text) for x in ann_trees]\n            meta['looks'] = (median(looks_rg), median(looks_az))\n            \n            samples = [x.find('.//imageAnnotation/imageInformation/numberOfSamples').text for x in ann_trees]\n            meta['samples'] = sum([int(x) for x in samples])\n            \n            lines = [x.find('.//imageAnnotation/imageInformation/numberOfLines').text for x in ann_trees]\n            meta['lines'] = sum([int(x) for x in lines])\n            \n            heading = median(float(x.find('.//platformHeading').text) for x in ann_trees)\n            meta['heading'] = heading if heading > 0 else heading + 360\n            \n            incidence = [float(x.find('.//incidenceAngleMidSwath').text) for x in ann_trees]\n            meta['incidence'] = median(incidence)\n            \n            meta['image_geometry'] = ann_trees[0].find('.//projection').text.replace(' ', '_').upper()\n        \n        return meta\n    \n    def unpack(self, directory, overwrite=False, exist_ok=False):\n        outdir = os.path.join(directory, os.path.basename(self.file))\n        self._unpack(outdir, overwrite=overwrite, exist_ok=exist_ok)\n\n\nclass TSX(ID):\n    \"\"\"\n    Handler class for TerraSAR-X and TanDEM-X data\n    \n    Sensors:\n        * TSX1\n        * TDX1\n\n    References:\n        * TX-GS-DD-3302  TerraSAR-X Basic Product Specification Document\n        * TX-GS-DD-3303  TerraSAR-X Experimental Product Description\n        * TD-GS-PS-3028  TanDEM-X Experimental Product Description\n        * TerraSAR-X Image Product Guide (Airbus Defence and Space)\n    \n    Acquisition modes:\n        * ST:    Staring Spotlight\n        * HS:    High Resolution SpotLight\n        * HS300: High Resolution SpotLight 300 MHz\n        * SL:    SpotLight\n        * SM:    StripMap\n        * SC:    ScanSAR\n        * 
WS:    Wide ScanSAR\n    \n    Polarisation modes:\n        * Single (S): all acquisition modes\n        * Dual   (D): High Resolution SpotLight (HS), SpotLight (SL) and StripMap (SM)\n        * Twin   (T): StripMap (SM) (experimental)\n        * Quad   (Q): StripMap (SM) (experimental)\n    \n    Products:\n        * SSC: Single Look Slant Range Complex\n        * MGD: Multi Look Ground Range Detected\n        * GEC: Geocoded Ellipsoid Corrected\n        * EEC: Enhanced Ellipsoid Corrected\n    \"\"\"\n    \n    def __init__(self, scene):\n        if isinstance(scene, str):\n            self.scene = os.path.realpath(scene)\n            \n            self.pattern = patterns.tsx\n            \n            self.pattern_ds = r'^IMAGE_(?P<pol>HH|HV|VH|VV)_(?:SRA|FWD|AFT)_(?P<beam>[^\\.]+)\\.(cos|tif)$'\n            self.examine(include_folders=False)\n            \n            if not re.match(re.compile(self.pattern), os.path.basename(self.file)):\n                raise RuntimeError('folder does not match TSX scene naming convention')\n            \n            self.meta = self.scanMetadata()\n            self.meta['projection'] = crsConvert(4326, 'wkt')\n        \n        super(TSX, self).__init__(self.meta)\n    \n    def scanMetadata(self):\n        annotation = self.getFileObj(self.file).getvalue()\n        namespaces = getNamespaces(annotation)\n        tree = ET.fromstring(annotation)\n        meta = dict()\n        meta['sensor'] = tree.find('.//generalHeader/mission', namespaces).text.replace('-', '')\n        meta['product'] = tree.find('.//orderInfo/productVariant', namespaces).text\n        meta['orbit'] = tree.find('.//missionInfo/orbitDirection', namespaces).text[0]\n        meta['polarizations'] = [x.text for x in\n                                 tree.findall('.//acquisitionInfo/polarisationList/polLayer', namespaces)]\n        \n        meta['orbitNumber_abs'] = int(tree.find('.//missionInfo/absOrbit', namespaces).text)\n        meta['orbitNumber_rel'] 
= int(tree.find('.//missionInfo/relOrbit', namespaces).text)\n        meta['cycleNumber'] = int(tree.find('.//missionInfo/orbitCycle', namespaces).text)\n        meta['frameNumber'] = int(tree.find('.//inputData/uniqueDataTakeID', namespaces).text)\n        \n        meta['acquisition_mode'] = tree.find('.//acquisitionInfo/imagingMode', namespaces).text\n        meta['start'] = self.parse_date(tree.find('.//sceneInfo/start/timeUTC', namespaces).text)\n        meta['stop'] = self.parse_date(tree.find('.//sceneInfo/stop/timeUTC', namespaces).text)\n        spacing_row = float(tree.find('.//imageDataInfo/imageRaster/rowSpacing', namespaces).text)\n        spacing_col = float(tree.find('.//imageDataInfo/imageRaster/columnSpacing', namespaces).text)\n        meta['spacing'] = (spacing_col, spacing_row)\n        meta['samples'] = int(tree.find('.//imageDataInfo/imageRaster/numberOfColumns', namespaces).text)\n        meta['lines'] = int(tree.find('.//imageDataInfo/imageRaster/numberOfRows', namespaces).text)\n        rlks = float(tree.find('.//imageDataInfo/imageRaster/rangeLooks', namespaces).text)\n        azlks = float(tree.find('.//imageDataInfo/imageRaster/azimuthLooks', namespaces).text)\n        meta['looks'] = (rlks, azlks)\n        meta['incidence'] = float(tree.find('.//sceneInfo/sceneCenterCoord/incidenceAngle', namespaces).text)\n        \n        geocs = self.getFileObj(self.findfiles('GEOREF.xml')[0]).getvalue()\n        tree = ET.fromstring(geocs)\n        pts = tree.findall('.//gridPoint')\n        lat = [float(x.find('lat').text) for x in pts]\n        lon = [float(x.find('lon').text) for x in pts]\n        # shift lon in case of west direction.\n        lon = [x - 360 if x > 180 else x for x in lon]\n        meta['coordinates'] = list(zip(lon, lat))\n        \n        return meta\n    \n    def unpack(self, directory, overwrite=False, exist_ok=False):\n        match = self.findfiles(self.pattern, True)\n        header = [x for x in match if not 
x.endswith('xml') and 'iif' not in x][0].replace(self.scene, '').strip('/')\n        outdir = os.path.join(directory, os.path.basename(header))\n        self._unpack(outdir, offset=header, overwrite=overwrite, exist_ok=exist_ok)\n\n\nclass TDM(TSX):\n    \"\"\"\n    Handler class for TerraSAR-X and TanDEM-X experimental data\n    \n    Sensors:\n        * TDM1\n\n    References:\n        * TD-GS-PS-3028  TanDEM-X Experimental Product Description\n    \n    Acquisition modes:\n        * HS:    High Resolution SpotLight\n        * SL:    SpotLight\n        * SM:    StripMap\n    \n    Polarisation modes:\n        * Single (S): all acquisition modes\n        * Dual   (D): High Resolution SpotLight (HS), SpotLight (SL) and StripMap (SM)\n        * Twin   (T): StripMap (SM) (experimental)\n        * Quad   (Q): StripMap (SM) (experimental)\n    \n    Products:\n        * CoSSCs: (bi-static) SAR co-registered single look slant range complex products (CoSSCs)\n\n\n    Examples\n    ----------\n    Ingest all Tandem-X Bistatic scenes in a directory and its sub-directories into the database:\n\n    >>> from pyroSAR import Archive, identify\n    >>> from spatialist.ancillary import finder\n    >>> dbfile = '/.../scenelist.db'\n    >>> archive_tdm = '/.../TDM/'\n    >>> scenes_tdm = finder(archive_tdm, [r'^TDM1.*'], foldermode=2, regex=True, recursive=True)\n    >>> with Archive(dbfile) as archive:\n    >>>     archive.insert(scenes_tdm)\n    \"\"\"\n    \n    def __init__(self, scene):\n        self.scene = os.path.realpath(scene)\n        \n        self.pattern = patterns.tdm\n        \n        self.pattern_ds = r'^IMAGE_(?P<pol>HH|HV|VH|VV)_(?:SRA|FWD|AFT)_(?P<beam>[^\\.]+)\\.(cos|tif)$'\n        self.examine(include_folders=False)\n        \n        if not re.match(re.compile(self.pattern), os.path.basename(self.file)):\n            raise RuntimeError('folder does not match TDM scene naming convention')\n        \n        self.meta = self.scanMetadata()\n        
self.meta['projection'] = crsConvert(4326, 'wkt')\n        \n        super(TDM, self).__init__(self.meta)\n    \n    def scanMetadata(self):\n        annotation = self.getFileObj(self.file).getvalue()\n        namespaces = getNamespaces(annotation)\n        tree = ET.fromstring(annotation)\n        meta = dict()\n        meta['sensor'] = tree.find('.//commonAcquisitionInfo/missionID', namespaces).text.replace('-', '')\n        meta['product'] = tree.find('.//productInfo/productType', namespaces).text\n        meta['SAT1'] = tree.find('.//commonAcquisitionInfo/satelliteIDsat1', namespaces).text\n        meta['SAT2'] = tree.find('.//commonAcquisitionInfo/satelliteIDsat2', namespaces).text\n        meta['inSARmasterID'] = tree.find('.//commonAcquisitionInfo/inSARmasterID', namespaces).text\n        pattern = './/commonAcquisitionInfo/satelliteID{}'.format(meta['inSARmasterID'].lower())\n        meta['inSARmaster'] = tree.find(pattern, namespaces).text.replace('-', '')\n        \n        pattern = './/commonAcquisitionInfo/operationsInfo/acquisitionItemID'\n        meta['acquisitionItemID'] = int(tree.find(pattern, namespaces).text)\n        \n        meta['effectiveBaseline'] = float(tree.find('.//acquisitionGeometry/effectiveBaseline', namespaces).text)\n        meta['heightOfAmbiguity'] = float(tree.find('.//acquisitionGeometry/heightOfAmbiguity', namespaces).text)\n        meta['distanceActivePos'] = float(tree.find('.//acquisitionGeometry/distanceActivePos', namespaces).text)\n        meta['distanceTracks'] = float(tree.find('.//acquisitionGeometry/distanceTracks', namespaces).text)\n        \n        meta['cooperativeMode'] = tree.find('.//commonAcquisitionInfo/cooperativeMode', namespaces).text\n        \n        if meta['cooperativeMode'].lower() == \"bistatic\":\n            meta['bistatic'] = True\n        else:\n            meta['bistatic'] = False\n        \n        meta['orbit'] = tree.find('.//acquisitionGeometry/orbitDirection', namespaces).text[0]\n     
   \n        pattern = \".//productComponents/component[@componentClass='imageData']/file/location/name\"\n        elements = tree.findall(pattern, )\n        self.primary_scene = os.path.join(self.scene, elements[0].text)\n        self.secondary_scene = os.path.join(self.scene, elements[1].text)\n        meta[\"SAT1\"] = TSX(self.primary_scene).scanMetadata()\n        meta[\"SAT2\"] = TSX(self.secondary_scene).scanMetadata()\n        \n        meta['start'] = self.parse_date(tree.find('.//orbitHeader/firstStateTime/firstStateTimeUTC', namespaces).text)\n        meta['stop'] = self.parse_date(tree.find('.//orbitHeader/lastStateTime/lastStateTimeUTC', namespaces).text)\n        meta['samples'] = int(tree.find('.//coregistration/coregRaster/samples', namespaces).text)\n        meta['lines'] = int(tree.find('.//coregistration/coregRaster/lines', namespaces).text)\n        rlks = float(tree.find('.//processingInfo/inSARProcessing/looks/range', namespaces).text)\n        azlks = float(tree.find('.//processingInfo/inSARProcessing/looks/azimuth', namespaces).text)\n        meta['looks'] = (rlks, azlks)\n        meta['incidence'] = float(tree.find('.//commonSceneInfo/sceneCenterCoord/incidenceAngle', namespaces).text)\n        \n        meta['orbit'] = meta[meta['inSARmasterID']]['orbit']\n        meta['polarizations'] = meta[meta['inSARmasterID']]['polarizations']\n        \n        meta['orbitNumber_abs'] = meta[meta['inSARmasterID']]['orbitNumber_abs']\n        meta['orbitNumber_rel'] = meta[meta['inSARmasterID']]['orbitNumber_rel']\n        meta['cycleNumber'] = meta[meta['inSARmasterID']]['cycleNumber']\n        meta['frameNumber'] = meta[meta['inSARmasterID']]['frameNumber']\n        \n        meta['acquisition_mode'] = meta[meta['inSARmasterID']]['acquisition_mode']\n        meta['start'] = meta[meta['inSARmasterID']]['start']\n        meta['stop'] = meta[meta['inSARmasterID']]['stop']\n        meta['spacing'] = meta[meta['inSARmasterID']]['spacing']\n        
meta['samples'] = meta[meta['inSARmasterID']]['samples']\n        meta['lines'] = meta[meta['inSARmasterID']]['lines']\n        meta['looks'] = meta[meta['inSARmasterID']]['looks']\n        meta['incidence'] = meta[meta['inSARmasterID']]['incidence']\n        \n        pts = tree.findall('.//sceneCornerCoord')\n        lat = [float(x.find('lat').text) for x in pts]\n        lon = [float(x.find('lon').text) for x in pts]\n        # shift lon in case of west direction.\n        lon = [x - 360 if x > 180 else x for x in lon]\n        meta['coordinates'] = list(zip(lon, lat))\n        \n        return meta\n\n\ndef getFileObj(scene, filename):\n    \"\"\"\n    Load a file in a SAR scene archive into a readable file object.\n\n    Parameters\n    ----------\n    scene: str\n        the scene archive. Can be either a directory or a compressed archive of type `zip` or `tar.gz`.\n    filename: str\n        the name of a file in the scene archive, easiest to get with method :meth:`~ID.findfiles`\n\n    Returns\n    -------\n    ~io.BytesIO\n        a file object\n    \"\"\"\n    membername = filename.replace(scene, '').strip(r'\\/')\n    \n    if not os.path.exists(scene):\n        raise RuntimeError('scene does not exist')\n    \n    if os.path.isdir(scene):\n        obj = BytesIO()\n        with open(filename, 'rb') as infile:\n            obj.write(infile.read())\n        obj.seek(0)\n    \n    # the scene consists of a single file\n    elif os.path.isfile(scene) and scene == filename:\n        obj = BytesIO()\n        with open(filename, 'rb') as infile:\n            obj.write(infile.read())\n        obj.seek(0)\n    \n    elif zf.is_zipfile(scene):\n        obj = BytesIO()\n        with zf.ZipFile(scene, 'r') as zip:\n            obj.write(zip.open(membername).read())\n        obj.seek(0)\n    \n    elif tf.is_tarfile(scene):\n        obj = BytesIO()\n        tar = tf.open(scene, 'r:gz')\n        obj.write(tar.extractfile(membername).read())\n        tar.close()\n      
  obj.seek(0)\n    else:\n        raise RuntimeError('input must be either a file name or a location in an zip or tar archive')\n    return obj\n\n\ndef parse_date(x):\n    \"\"\"\n    this function gathers known time formats provided in the different SAR products and converts them to a common\n    standard of the form YYYYMMDDTHHMMSS\n\n    Parameters\n    ----------\n    x: str or ~datetime.datetime\n        the time stamp to be converted\n\n    Returns\n    -------\n    str\n        the converted time stamp in format YYYYmmddTHHMMSS\n    \"\"\"\n    if isinstance(x, datetime):\n        return x.strftime('%Y%m%dT%H%M%S')\n    elif isinstance(x, str):\n        for timeformat in ['%d-%b-%Y %H:%M:%S.%f',\n                           '%Y%m%d%H%M%S%f',\n                           '%Y-%m-%dT%H:%M:%S.%f',\n                           '%Y-%m-%dT%H:%M:%S.%fZ',\n                           '%Y%m%d %H:%M:%S.%f']:\n            try:\n                return strftime('%Y%m%dT%H%M%S', strptime(x, timeformat))\n            except (TypeError, ValueError):\n                continue\n        raise ValueError('unknown time format; check function parse_date')\n    else:\n        raise ValueError('input must be either a string or a datetime object')\n"
  },
  {
    "path": "pyroSAR/examine.py",
    "content": "###############################################################################\n# Examination of SAR processing software\n# Copyright (c) 2019-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nimport json\nimport os\nimport shutil\nimport re\nimport warnings\nimport platform\nimport subprocess as sp\nimport importlib.resources\n\nfrom pyroSAR.config import ConfigHandler\nfrom spatialist.ancillary import finder, run\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\n__config__ = ConfigHandler()\n\n\nclass ExamineSnap(object):\n    \"\"\"\n    Class to check if ESA SNAP is installed.\n    Upon initialization, this class searches for relevant binaries and the accompanying\n    relative directory structure, which uniquely identify an ESA SNAP installation on a system.\n    First, all relevant file and folder names are read from the pyroSAR config file if it exists\n    and their existence is verified.\n    If this fails, a system check is performed to find relevant binaries in the system PATH variable and\n    additional files and folders relative to them.\n    In case SNAP is not installed, a default `snap.auxdata.properties` file delivered with pyroSAR will be copied to\n    `$HOME/.snap/etc` so that SNAP download URLS and local directory structure can be adapted by other software.\n    \n    SNAP configuration can be read and modified via the attribute `snap_properties` of type\n    :class:`~pyroSAR.examine.SnapProperties` or the properties 
:attr:`~pyroSAR.examine.ExamineSnap.userpath` and\n    :attr:`~pyroSAR.examine.ExamineSnap.auxdatapath`.\n    \"\"\"\n    _version_dict = None\n    \n    def __init__(self):\n        # update legacy config files\n        if 'OUTPUT' in __config__.sections:\n            __config__.remove_section('OUTPUT')\n        if 'SNAP' in __config__.sections:\n            snap_keys = __config__.keys('SNAP')\n            for key in ['auxdata', 'auxdatapath', 'properties']:\n                if key in snap_keys:\n                    __config__.remove_option(section='SNAP', key=key)\n        \n        # define some attributes which identify SNAP\n        self.identifiers = ['path', 'gpt', 'etc']\n        \n        # a list of relevant sections\n        self.sections = ['SNAP', 'SNAP_SUFFIX']\n        \n        # set attributes path, gpt, etc, __suffices\n        self.__read_config()\n        \n        # if SNAP could not be identified from the config attributes, do a system search for it\n        # sets attributes path, gpt, etc\n        if not self.__is_identified():\n            log.debug('identifying SNAP')\n            self.__identify_snap()\n        \n        # if SNAP cannot be identified, copy the snap.auxdata.properties file to $HOME/.snap/etc\n        if not self.__is_identified():\n            self.etc = os.path.join(os.path.expanduser('~'), '.snap', 'etc')\n            os.makedirs(self.etc, exist_ok=True)\n            dst = os.path.join(self.etc, 'snap.auxdata.properties')\n            if not os.path.isfile(dst):\n                dir_data = importlib.resources.files('pyroSAR') / 'snap' / 'data'\n                src = str(dir_data / 'snap.auxdata.properties')\n                log.debug(f'creating {dst}')\n                shutil.copyfile(src, dst)\n        \n        # if the SNAP suffices attribute was not yet identified,\n        # point it to the default file delivered with pyroSAR\n        if not hasattr(self, '__suffices'):\n            dir_data = 
importlib.resources.files('pyroSAR') / 'snap' / 'data'\n            fname_suffices = str(dir_data / 'snap.suffices.properties')\n            with open(fname_suffices, 'r') as infile:\n                content = infile.read().split('\\n')\n            self.__suffices = {k: v for k, v in [x.split('=') for x in content]}\n        \n        # SNAP property read/modification interface\n        self.snap_properties = SnapProperties(path=os.path.dirname(self.etc))\n        \n        # update the config file: this scans for config changes and re-writes the config file if any are found\n        self.__update_config()\n    \n    def __getattr__(self, item):\n        if item in ['path', 'gpt']:\n            msg = ('SNAP could not be identified. If you have installed it '\n                   'please add the path to the SNAP executables (bin subdirectory) '\n                   'to the PATH environment. E.g. in the Linux .bashrc file add '\n                   'the following line:\\nexport PATH=$PATH:path/to/snap/bin\"')\n        else:\n            msg = \"'ExamineSnap' object has no attribute '{}'\".format(item)\n        raise AttributeError(msg)\n    \n    def __is_identified(self):\n        \"\"\"\n        Check if SNAP has been properly identified, i.e. 
all paths in `self.identifiers`\n        have been detected and confirmed.\n        \n        Returns\n        -------\n        bool\n        \"\"\"\n        return sum([hasattr(self, x) for x in self.identifiers]) == len(self.identifiers)\n    \n    def __identify_snap(self):\n        \"\"\"\n        do a comprehensive search for an ESA SNAP installation\n        \n        Returns\n        -------\n        bool\n            has the SNAP properties file been changed?\n        \"\"\"\n        # create a list of possible SNAP executables\n        defaults = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']\n        paths = os.environ['PATH'].split(os.path.pathsep)\n        options = [os.path.join(path, option) for path in paths for option in defaults]\n        options = [x for x in options if os.path.isfile(x)]\n        \n        if not hasattr(self, 'path') or not os.path.isfile(self.path):\n            executables = options\n        else:\n            executables = [self.path] + options\n        \n        if len(executables) == 0:\n            log.debug(\"could not detect any potential 'snap' executables\")\n        \n        # for each possible SNAP executable, check whether additional files and directories exist relative to it\n        # to confirm whether it actually is an ESA SNAP installation or something else like e.g. 
the Ubuntu App Manager\n        for path in executables:\n            log.debug('checking candidate {}'.format(path))\n            if os.path.islink(path):\n                path = os.path.realpath(path)\n            \n            # check whether a directory etc exists relative to the SNAP executable\n            etc = os.path.join(os.path.dirname(os.path.dirname(path)), 'etc')\n            if not os.path.isdir(etc):\n                log.debug(\"could not find the 'etc' directory\")\n                continue\n            \n            # check the content of the etc directory\n            config_files = os.listdir(etc)\n            expected = ['snap.auxdata.properties', 'snap.clusters',\n                        'snap.conf', 'snap.properties']\n            for name in expected:\n                if name not in config_files:\n                    log.debug(f\"could not find the '{name}' file\")\n                    continue\n            \n            # identify the gpt executable\n            gpt_candidates = finder(os.path.dirname(path), ['gpt', 'gpt.exe'])\n            if len(gpt_candidates) == 0:\n                log.debug(\"could not find the 'gpt' executable\")\n                continue\n            else:\n                gpt = gpt_candidates[0]\n            \n            self.path = path\n            self.etc = etc\n            self.gpt = gpt\n            return\n    \n    def __read_config(self):\n        \"\"\"\n        This method reads the config.ini to examine the snap paths.\n        If the snap paths are not in the config.ini or the paths are\n        wrong they will be automatically created.\n\n        Returns\n        -------\n\n        \"\"\"\n        for attr in self.identifiers:\n            self.__read_config_attr(attr, section='SNAP')\n        \n        suffices = {}\n        if 'SNAP_SUFFIX' in __config__.sections:\n            suffices = __config__['SNAP_SUFFIX']\n        if len(suffices.keys()) > 0:\n            self.__suffices = suffices\n    \n   
 def __read_config_attr(self, attr, section):\n        \"\"\"\n        read an attribute from the config file and set it as an object attribute\n        \n        Parameters\n        ----------\n        attr: str\n            the attribute name\n        section: str\n            the config section to read the attribute from\n        \n        Returns\n        -------\n        \n        \"\"\"\n        if section in __config__.sections:\n            if attr in __config__[section].keys():\n                val = __config__[section][attr]\n                if os.path.exists(val):\n                    # log.info('setting attribute {}'.format(attr))\n                    setattr(self, attr, val)\n    \n    def __read_version_dict(self):\n        log.debug('reading SNAP version information')\n        out = {}\n        \n        cmd = [self.path, '--nosplash', '--nogui', '--modules',\n               '--list', '--refresh']\n        if platform.system() == 'Windows':\n            cmd.extend(['--console', 'suppress'])\n        \n        # fix Exception in thread \"main\" java.awt.AWTError: Can't connect to\n        # X11 window server using 'xyz' as the value of the DISPLAY variable.\n        env = os.environ.copy()\n        env['DISPLAY'] = ''\n        \n        proc = sp.Popen(args=cmd, stdout=sp.PIPE, stderr=sp.STDOUT,\n                        text=True, encoding='utf-8', bufsize=1,\n                        env=env)\n        \n        counter = 0\n        lines = []\n        lines_info = []\n        for line in proc.stdout:\n            line = line.rstrip()\n            lines.append(line)\n            if line.startswith('---'):\n                counter += 1\n            else:\n                if counter == 1:\n                    lines_info.append(line)\n            if counter == 2:\n                proc.terminate()\n        proc.wait()\n        \n        pattern = r'([a-z.]*)\\s+([0-9.]+)\\s+(.*)'\n        for line in lines_info:\n            code, version, state = 
re.search(pattern=pattern, string=line).groups()\n            out[code] = {'version': version, 'state': state}\n        if len(out) == 0:\n            snap_msg = \"\\n\".join(lines)\n            raise RuntimeError(f'{snap_msg}\\ncould not '\n                               f'read SNAP version information')\n        return out\n    \n    def __update_config(self):\n        for section in self.sections:\n            if section not in __config__.sections:\n                # log.info('creating section {}..'.format(section))\n                __config__.add_section(section)\n        \n        for key in self.identifiers:\n            if hasattr(self, key):\n                self.__update_config_attr(key, getattr(self, key), 'SNAP')\n        \n        for key in sorted(self.__suffices.keys()):\n            self.__update_config_attr(key, self.__suffices[key], 'SNAP_SUFFIX')\n    \n    @staticmethod\n    def __update_config_attr(attr, value, section):\n        if isinstance(value, list):\n            value = json.dumps(value)\n        \n        if attr not in __config__[section].keys() or __config__[section][attr] != value:\n            # log.info('updating attribute {0}:{1}..'.format(section, attr))\n            # log.info('  {0} -> {1}'.format(repr(config[section][attr]), repr(value)))\n            __config__.set(section, key=attr, value=value, overwrite=True)\n    \n    def get_suffix(self, operator):\n        \"\"\"\n        get the file name suffix for an operator\n        \n        Parameters\n        ----------\n        operator: str\n            the name of the operator\n\n        Returns\n        -------\n        str or None\n            the file suffix or None if unknown\n        \n        Examples\n        --------\n        >>> from pyroSAR.examine import ExamineSnap\n        >>> config = ExamineSnap()\n        >>> print(config.get_suffix('Terrain-Flattening'))\n        'TF'\n        \"\"\"\n        if operator in self.__suffices.keys():\n            return 
self.__suffices[operator]\n        else:\n            return None\n    \n    def get_version(self, module: str) -> str:\n        \"\"\"\n        Read the version and date of different SNAP modules.\n        The following SNAP command is called to get the information:\n        \n        .. code-block:: bash\n\n            snap --nosplash --nogui --modules --list --refresh --console suppress\n    \n        Parameters\n        ----------\n        module:\n            one of the following\n            \n            - core\n            - desktop\n            - rstb\n            - opttbx\n            - microwavetbx\n\n        Returns\n        -------\n            the version number\n        \"\"\"\n        if ExamineSnap._version_dict is None:\n            ExamineSnap._version_dict = self.__read_version_dict()\n        \n        log.debug(f\"reading version information for module '{module}'\")\n        patterns = {'core': 'org.esa.snap.snap.core',\n                    'desktop': 'org.esa.snap.snap.ui',\n                    'rstb': 'org.csa.rstb.rstb.kit',\n                    'opttbx': 'eu.esa.opt.opttbx.kit',\n                    'microwavetbx': 'eu.esa.microwavetbx.microwavetbx.kit'}\n        \n        if module not in patterns.keys():\n            raise ValueError(f\"'{module}' is not a valid module name. 
\"\n                             f\"Supported options: {patterns.keys()}\")\n        \n        for k, v in ExamineSnap._version_dict.items():\n            if patterns[module] == k:\n                if v['state'] == 'Available':\n                    raise RuntimeError(f'{module} is not installed')\n                log.debug(f'version is {v[\"version\"]}')\n                return v['version']\n        raise RuntimeError(f\"Could not find version \"\n                           f\"information for module '{module}'.\")\n    \n    @property\n    def auxdatapath(self):\n        \"\"\"\n        Get/set the SNAP configuration for `AuxDataPath` in `snap.auxdata.properties`.\n        \n        Example\n        -------\n        >>> from pyroSAR.examine import ExamineSnap\n        >>> config = ExamineSnap()\n        >>> config.auxdatapath = '/path/to/snap/auxdata'\n        # This is equivalent to\n        >>> config.snap_properties['AuxDataPath'] = '/path/to/snap/auxdata'\n        \"\"\"\n        out = self.snap_properties['AuxDataPath']\n        if out is None:\n            out = os.path.join(self.userpath, 'auxdata')\n        return out\n    \n    @auxdatapath.setter\n    def auxdatapath(self, value):\n        self.snap_properties['AuxDataPath'] = value\n    \n    @property\n    def userpath(self):\n        \"\"\"\n        Get/set the SNAP configuration for `snap.userdir` in `snap.properties`.\n\n        Example\n        -------\n        >>> from pyroSAR.examine import ExamineSnap\n        >>> config = ExamineSnap()\n        >>> config.userpath = '/path/to/snap/data'\n        # This is equivalent to\n        >>> config.snap_properties['snap.userdir'] = '/path/to/snap/data'\n        \"\"\"\n        return self.snap_properties.userpath\n    \n    @userpath.setter\n    def userpath(self, value):\n        self.snap_properties.userpath = value\n\n\nclass ExamineGamma(object):\n    \"\"\"\n    Class to check if GAMMA is installed.\n    \n    Examples\n    --------\n    >>> from 
pyroSAR.examine import ExamineGamma\n    >>> config = ExamineGamma()\n    >>> print(config.home)\n    >>> print(config.version)\n    \n    \"\"\"\n    \n    def __init__(self):\n        home_sys = os.environ.get('GAMMA_HOME')\n        if home_sys is not None and not os.path.isdir(home_sys):\n            warnings.warn('found GAMMA_HOME environment variable, but directory does not exist')\n            home_sys = None\n        \n        self.__read_config()\n        \n        if hasattr(self, 'home'):\n            if home_sys is not None and self.home != home_sys:\n                log.info('the value of GAMMA_HOME is different to that in the pyroSAR configuration;\\n'\n                         '  was: {}\\n'\n                         '  is : {}\\n'\n                         'resetting the configuration and deleting parsed modules'\n                         .format(self.home, home_sys))\n                parsed = os.path.join(os.path.dirname(self.fname), 'gammaparse')\n                shutil.rmtree(parsed)\n                self.home = home_sys\n        if not hasattr(self, 'home'):\n            if home_sys is not None:\n                setattr(self, 'home', home_sys)\n            else:\n                raise RuntimeError('could not read GAMMA installation directory')\n        self.version = re.search('GAMMA_SOFTWARE[-/](?P<version>[0-9]{8})',\n                                 getattr(self, 'home')).group('version')\n        \n        try:\n            returncode, out, err = run(['which', 'gdal-config'], void=False)\n            gdal_config = out.strip('\\n')\n            self.gdal_config = gdal_config\n        except sp.CalledProcessError:\n            raise RuntimeError('could not find command gdal-config.')\n        self.__update_config()\n    \n    def __read_config(self):\n        self.fname = __config__.file\n        if 'GAMMA' in __config__.sections:\n            attr = __config__['GAMMA']\n            for key, value in attr.items():\n                setattr(self, 
key, value)\n    \n    def __update_config(self):\n        if 'GAMMA' not in __config__.sections:\n            __config__.add_section('GAMMA')\n        \n        for attr in ['home', 'version']:\n            self.__update_config_attr(attr, getattr(self, attr), 'GAMMA')\n    \n    @staticmethod\n    def __update_config_attr(attr, value, section):\n        if isinstance(value, list):\n            value = json.dumps(value)\n        \n        if attr not in __config__[section].keys() or __config__[section][attr] != value:\n            __config__.set(section, key=attr, value=value, overwrite=True)\n\n\nclass SnapProperties(object):\n    \"\"\"\n    SNAP configuration interface. This class enables reading and modifying\n    SNAP configuration in properties files. Modified properties are directly\n    written to the files.\n    Currently, the files `snap.properties`, `snap.auxdata.properties` and `snap.conf`\n    are supported. These files can be found in two locations:\n    \n    - `<SNAP installation directory>/etc`\n    - `<user directory>/.snap/etc`\n    \n    Configuration in the latter has higher priority, and modified properties will\n    always be written there so that the installation directory is not modified.\n\n    Parameters\n    ----------\n    path: str\n        SNAP installation directory path\n    \n    Examples\n    --------\n    >>> from pyroSAR.examine import ExamineSnap, SnapProperties\n    >>> path = ExamineSnap().path\n    >>> config = SnapProperties(path=path)\n    >>> config['snap.userdir'] = '/path/to/snap/auxdata'\n    \"\"\"\n    \n    def __init__(self, path):\n        self.pattern = r'^(?P<comment>#?)(?P<key>[\\w\\.]*)[ ]*=[ ]*\"?(?P<value>[^\"\\n]*)\"?\\n*'\n        self.pattern_key_replace = r'#?{}[ ]*=[ ]*(?P<value>.*)'\n        \n        self.properties_path = os.path.join(path, 'etc', 'snap.properties')\n        log.debug(f\"reading {self.properties_path}\")\n        self.properties = self._to_dict(self.properties_path)\n        
self.properties.update(self._to_dict(self.userpath_properties))\n        \n        self.auxdata_properties_path = os.path.join(path, 'etc', 'snap.auxdata.properties')\n        log.debug(f\"reading {self.auxdata_properties_path}\")\n        self.auxdata_properties = self._to_dict(self.auxdata_properties_path)\n        self.auxdata_properties.update(self._to_dict(self.userpath_auxdata_properties))\n        \n        self.conf_path = os.path.join(path, 'etc', 'snap.conf')\n        log.debug(f\"reading {self.conf_path}\")\n        str_split = {'default_options': ' '}\n        self.conf = self._to_dict(path=self.conf_path, str_split=str_split)\n        self.conf.update(self._to_dict(self.userpath_conf, str_split=str_split))\n        \n        self._dicts = [self.properties, self.auxdata_properties, self.conf]\n        \n        # removing this because of\n        # \"RuntimeError: OpenJDK 64-Bit Server VM warning: Options\n        # -Xverify:none and -noverify were deprecated in JDK 13 and will\n        # likely be removed in a future release.\"\n        if '-J-Xverify:none' in self.conf['default_options']:\n            opts = self.conf['default_options'].copy()\n            opts.remove('-J-Xverify:none')\n            self['default_options'] = opts\n        \n        # some properties need to be read from the default user path to\n        # be visible to SNAP\n        pairs = [(self.userpath_properties, self.properties_path),\n                 (self.userpath_auxdata_properties, self.auxdata_properties_path)]\n        for default, defined in pairs:\n            if default != defined:\n                conf = self._to_dict(default)\n                if len(conf.keys()) > 0:\n                    log.debug(f\"updating keys {list(conf.keys())} from {default}\")\n                    self.properties.update(conf)\n    \n    def __getitem__(\n            self,\n            key: str\n    ) -> int | float | str | list[str]:\n        for section in self._dicts:\n            if key in 
section:\n                return section[key].copy() \\\n                    if hasattr(section[key], 'copy') \\\n                    else section[key]\n        raise KeyError(f'could not find key {key}')\n    \n    def __setitem__(\n            self,\n            key: str,\n            value: int | float | str | list[str] | None\n    ) -> None:\n        if not (isinstance(value, (int, float, str, list)) or value is None):\n            raise TypeError(f'invalid type for key {key}: {type(value)}')\n        if value == self[key] and isinstance(value, type(self[key])):\n            return\n        if key in self.properties:\n            self.properties[key] = value\n        elif key in self.auxdata_properties:\n            self.auxdata_properties[key] = value\n        else:\n            self.conf[key] = value\n        if value is not None:\n            if isinstance(value, list):\n                value = ' '.join(value)\n            value = str(value).encode('unicode-escape').decode()\n            value = value.replace(':', '\\\\:')\n        if key in self.properties:\n            path = self.userpath_properties\n        elif key in self.auxdata_properties:\n            path = self.userpath_auxdata_properties\n        elif key in self.conf:\n            path = self.userpath_conf\n        else:\n            raise KeyError(f'unknown key {key}')\n        if os.path.isfile(path):\n            with open(path, 'r') as f:\n                content = f.read()\n        else:\n            content = ''\n        pattern = self.pattern_key_replace.format(key)\n        match = re.search(pattern, content)\n        if match:\n            repl = f'#{key} =' if value is None else f'{key} = {value}'\n            content = content.replace(match.group(), repl)\n        else:\n            content += f'\\n{key} = {value}'\n        \n        os.makedirs(os.path.dirname(path), exist_ok=True)\n        log.debug(f\"writing key '{key}' to '{path}'\")\n        with open(path, 'w') as f:\n          
  f.write(content)\n    \n    def _to_dict(\n            self,\n            path: str,\n            str_split: dict[str, str] | None = None\n    ) -> dict[str, int | float | str | None | list[str]]:\n        \"\"\"\n        Read a properties file into a dictionary.\n        Converts values into basic python types\n        \n        Parameters\n        ----------\n        path:\n            the path to the properties file\n        str_split:\n            a dictionary with properties as keys and splitting characters as values\n            to split a string into a list of strings\n\n        Returns\n        -------\n            the dictionary with the properties\n        \"\"\"\n        out = {}\n        if os.path.isfile(path):\n            with open(path, 'r') as f:\n                for line in f:\n                    if re.search(self.pattern, line):\n                        match = re.match(re.compile(self.pattern), line)\n                        comment, key, value = match.groups()\n                        if comment == '':\n                            if str_split is not None and key in str_split.keys():\n                                value = value.split(str_split[key])\n                            else:\n                                value = self._string_convert(value)\n                            out[key] = value\n                        else:\n                            out[key] = None\n        return out\n    \n    @staticmethod\n    def _string_convert(string):\n        if string.lower() == 'none':\n            return None\n        elif string.lower() == 'true':\n            return True\n        elif string.lower() == 'false':\n            return False\n        else:\n            try:\n                return int(string)\n            except ValueError:\n                try:\n                    return float(string)\n                except ValueError:\n                    return string.replace('\\\\:', ':').replace('\\\\\\\\', '\\\\')\n    \n    def 
keys(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list[str]\n            all known SNAP property keys\n        \"\"\"\n        keys = []\n        for item in self._dicts:\n            keys.extend(list(item.keys()))\n        return sorted(keys)\n    \n    @property\n    def userpath(self):\n        key = 'snap.userdir'\n        if key not in self.keys() or self[key] is None:\n            return os.path.join(os.path.expanduser('~'), '.snap')\n        else:\n            return self[key]\n    \n    @userpath.setter\n    def userpath(self, value):\n        self['snap.userdir'] = value\n    \n    @property\n    def userpath_auxdata_properties(self):\n        return os.path.join(os.path.expanduser('~'), '.snap',\n                            'etc', 'snap.auxdata.properties')\n    \n    @property\n    def userpath_properties(self):\n        return os.path.join(os.path.expanduser('~'), '.snap',\n                            'etc', 'snap.properties')\n    \n    @property\n    def userpath_conf(self):\n        return os.path.join(os.path.expanduser('~'), '.snap',\n                            'etc', 'snap.conf')\n"
  },
  {
    "path": "pyroSAR/gamma/__init__.py",
    "content": "from .auxil import process, ISPPar, UTM, Spacing, Namespace, slc_corners, par2hdr\nfrom .util import calibrate, convert2gamma, correctOSV, geocode, multilook, ovs, S1_deburst\nfrom . import dem\n"
  },
  {
    "path": "pyroSAR/gamma/api.py",
    "content": "###############################################################################\n# import wrapper for the pyroSAR GAMMA API\n\n# Copyright (c) 2018-2019, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nimport os\nimport sys\nimport warnings\n\nfrom .parser import autoparse\n\ntry:\n    autoparse()\n    \n    sys.path.insert(0, os.path.join(os.path.expanduser('~'), '.pyrosar'))\n    \n    try:\n        from gammaparse import *\n    except ImportError:\n        warnings.warn('found a GAMMA installation directory, but module parsing failed')\n\nexcept RuntimeError:\n    warnings.warn('could not find GAMMA installation directory; please set the GAMMA_HOME environment variable')\n"
  },
  {
    "path": "pyroSAR/gamma/auxil.py",
    "content": "###############################################################################\n# general GAMMA utilities\n\n# Copyright (c) 2014-2026, the pyroSAR Developers, Stefan Engelhardt.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n################################################################################\nimport math\nimport os\nimport re\nimport string\nimport codecs\nimport subprocess as sp\nfrom datetime import datetime, timedelta\n\nfrom pyroSAR.examine import ExamineGamma\nfrom spatialist.ancillary import parse_literal, run, union, dissolve\nfrom spatialist.envi import hdr\n\nfrom .error import gammaErrorHandler\n\n\ndef do_execute(par, ids, exist_ok):\n    \"\"\"\n    small helper function to assess whether a GAMMA command shall be executed.\n\n    Parameters\n    ----------\n    par: dict\n        a dictionary containing all arguments for the command\n    ids: list[str]\n        the IDs of the output files\n    exist_ok: bool\n        allow existing output files?\n\n    Returns\n    -------\n    bool\n        execute the command because (a) not all output files exist or (b) existing files are not allowed\n    \"\"\"\n    all_exist = all([os.path.isfile(par[x]) for x in ids if par[x] != '-'])\n    return (exist_ok and not all_exist) or not exist_ok\n\n\nclass ISPPar(object):\n    \"\"\"\n    Reader for ISP parameter files of the GAMMA software package\n\n    This class allows to read all information from files in GAMMA's parameter file format.\n    Each key-value pair is parsed and added as attribute. 
For instance if the parameter file\n    contains the pair 'sensor:    TSX-1' an attribute named 'sensor' with the value 'TSX-1' will be available.\n\n    The values are converted to native Python types, while unit identifiers like 'dB' or 'Hz' are removed.\n    Please see the GAMMA reference manual for further information on the actual file format.\n    \n    Parameters\n    ----------\n    filename: str\n        the GAMMA parameter file\n    \n    Examples\n    --------\n    >>> from pyroSAR.gamma import ISPPar\n    >>> with ISPPar('S1A__IW___A_20141115T181801_VH_grd.par') as par:\n    ...     print(par) # print an overview of all available metadata\n    ...     print(par.keys) # print all parameter names\n    ...     for key, value in par.envidict().items():\n    ...         print('{0}: {1}'.format(key, value)) # print the ENVI HDR compliant metadata\n    \n    Attributes\n    ----------\n    keys: list\n        the names of all parameters\n    \"\"\"\n    \n    _re_kv_pair = re.compile(r'^(\\w+):\\s*(.+)\\s*')\n    _re_float_literal = re.compile(r'^[+-]?(?:(\\d*\\.\\d+)|(\\d+\\.?))(?:[Ee][+-]?\\d+)?')\n    \n    def __init__(self, filename):\n        \"\"\"Parses an ISP parameter file from disk.\n\n        Args:\n            filename: The filename or file object representing the ISP parameter file.\n        \"\"\"\n        if isinstance(filename, str):\n            par_file = open(filename, 'r')\n        else:\n            par_file = filename\n        \n        self.keys = ['filetype']\n        \n        try:\n            content = par_file.read().split('\\n')\n        except UnicodeDecodeError:\n            par_file = codecs.open(filename, 'r', encoding='utf-8', errors='ignore')\n            content = par_file.read()\n            printable = set(string.printable)\n            content = filter(lambda x: x in printable, content)\n            content = ''.join(list(content)).split('\\n')\n        finally:\n            par_file.close()\n        \n        if 'Image 
Parameter File' in content[0]:\n            setattr(self, 'filetype', 'isp')\n        elif 'DEM/MAP parameter file' in content[0]:\n            setattr(self, 'filetype', 'dem')\n        else:\n            raise RuntimeError('unknown parameter file type')\n        \n        for line in content:\n            match = ISPPar._re_kv_pair.match(line)\n            if not match:\n                continue  # Skip malformed lines with no key-value pairs\n            key = match.group(1)\n            items = match.group(2).split()\n            if len(items) == 0:\n                value = None\n            elif len(items) == 1:\n                value = parse_literal(items[0])\n            else:\n                if not ISPPar._re_float_literal.match(items[0]):\n                    # Value is a string literal containing whitespace characters\n                    value = match.group(2)\n                else:\n                    # Evaluate each item and stop at the first non-float literal\n                    value = []\n                    for i in items:\n                        match = ISPPar._re_float_literal.match(i)\n                        if match:\n                            value.append(parse_literal(match.group()))\n                        else:\n                            # If the first float literal is immediately followed by a non-float literal handle the\n                            # first one as singular value, e.g. 
in '20.0970 dB'\n                            if len(value) == 1:\n                                value = value[0]\n                            break\n            self.keys.append(key)\n            setattr(self, key, value)\n        \n        if hasattr(self, 'date'):\n            # the date field is rounded to four digits, so only the day is extracted\n            # and then the start_time field is added to be more precise and to avoid\n            # rounding to 60 s.\n            self.date_dt = datetime(*self.date[:3])\n            self.date_dt += timedelta(seconds=self.start_time)\n            self.date = self.date_dt.strftime('%Y-%m-%dT%H:%M:%S.%f')\n    \n    def __enter__(self):\n        return self\n    \n    def __exit__(self, exc_type, exc_val, exc_tb):\n        return\n    \n    def __getattr__(self, item):\n        # will only be run if object has no attribute item\n        raise AttributeError(\"parameter file has no attribute '{}'\".format(item))\n    \n    def __str__(self):\n        maxlen = len(max(self.keys, key=len)) + 1\n        return '\\n'.join(['{key}:{sep}{value}'.format(key=key,\n                                                      sep=(maxlen - len(key)) * ' ',\n                                                      value=getattr(self, key)) for key in self.keys])\n    \n    def envidict(self, nodata=None):\n        \"\"\"\n        export relevant metadata to an ENVI HDR file compliant format\n        \n        Parameters\n        ----------\n        nodata: int, float or None\n            a no data value to write to the HDR file via attribute 'data ignore value'\n        \n        Returns\n        -------\n        dict\n            a dictionary containing attributes translated to ENVI HDR naming\n        \"\"\"\n        out = dict(bands=1,\n                   header_offset=0,\n                   file_type='ENVI Standard',\n                   interleave='bsq',\n                   sensor_type='Unknown',\n                   byte_order=1,\n    
               wavelength_units='Unknown')\n        \n        if hasattr(self, 'date'):\n            out['acquisition_time'] = self.date + 'Z'\n        \n        out['samples'] = getattr(self, union(['width', 'range_samples', 'samples'], self.keys)[0])\n        out['lines'] = getattr(self, union(['nlines', 'azimuth_lines', 'lines'], self.keys)[0])\n        \n        dtypes_lookup = {'FCOMPLEX': 6, 'FLOAT': 4, 'REAL*4': 4, 'INTEGER*2': 2, 'SHORT': 12}\n        dtype = getattr(self, union(['data_format', 'image_format'], self.keys)[0])\n        \n        if dtype not in dtypes_lookup.keys():\n            raise TypeError('unsupported data type: {}'.format(dtype))\n        \n        out['data_type'] = dtypes_lookup[dtype]\n        \n        if nodata is not None:\n            out['data_ignore_value'] = nodata\n        \n        if out['data_type'] == 6:\n            out['complex_function'] = 'Power'\n        # projections = ['AEAC', 'EQA', 'LCC', 'LCC2', 'OMCH', 'PC', 'PS', 'SCH', 'TM', 'UTM']\n        # the corner coordinates are shifted by 1/2 pixel to the Northwest since GAMMA pixel\n        # coordinates are defined for the pixel center while in ENVI it is the upper left\n        if hasattr(self, 'DEM_projection'):\n            if self.DEM_projection == 'UTM':\n                hem = 'North' if float(self.false_northing) == 0 else 'South'\n                out['map_info'] = ['UTM', '1.0000', '1.0000',\n                                   self.corner_east - (abs(self.post_east) / 2),\n                                   self.corner_north + (abs(self.post_north) / 2),\n                                   str(abs(float(self.post_east))),\n                                   str(abs(float(self.post_north))),\n                                   self.projection_zone, hem, 'WGS-84', 'units=Meters']\n            elif self.DEM_projection == 'EQA':\n                out['map_info'] = ['Geographic Lat/Lon', '1.0000', '1.0000',\n                                   self.corner_lon - 
(abs(self.post_lon) / 2),\n                                   self.corner_lat + (abs(self.post_lat) / 2),\n                                   str(abs(float(self.post_lon))),\n                                   str(abs(float(self.post_lat))),\n                                   'WGS-84', 'units=Degrees']\n            elif self.DEM_projection == 'PS':\n                if self.projection_name == 'WGS 84 / Antarctic Polar Stereographic':\n                    out['map_info'] = [\n                        'EPSG:3031 - WGS 84 / Antarctic Polar Stereographic',\n                        '1.0000',\n                        '1.0000',\n                        self.corner_east - (abs(self.post_east) / 2),\n                        self.corner_north + (abs(self.post_north) / 2),\n                        str(abs(float(self.post_east))),\n                        str(abs(float(self.post_north))),\n                        'WGS-84',\n                        'units=Meters',\n                    ]\n                elif self.projection_name == 'WGS 84 / Arctic Polar Stereographic':\n                    out['map_info'] = [\n                        'EPSG:3995 - WGS 84 / Arctic Polar Stereographic',\n                        '1.0000',\n                        '1.0000',\n                        self.corner_east - (abs(self.post_east) / 2),\n                        self.corner_north + (abs(self.post_north) / 2),\n                        str(abs(float(self.post_east))),\n                        str(abs(float(self.post_north))),\n                        'WGS-84',\n                        'units=Meters',\n                    ]\n                else:\n                    raise RuntimeError(\n                        f'unsupported projection: \"{self.DEM_projection}; {self.projection_name}\". The projection name \"{self.projection_name}\" was not recognised. Expected projection names are \"WGS 84 / Arctic Polar Stereographic\" and \"WGS 84 / Antarctic Polar Stereographic\". 
Add support for the required projection name as an ENVI map info output in gamma.auxil.ISPPar.envidict.'\n                    )\n            else:\n                raise RuntimeError(\n                    f'unsupported projection: \"{self.DEM_projection}; {self.projection_name}\". To resolve, create an ENVI map info output for this projection in gamma.auxil.ISPPar.envidict.'\n                )\n        return out\n\n\nclass Namespace(object):\n    \"\"\"\n    GAMMA file name handler. This improves managing the many files names\n    handled when processing with GAMMA.\n    \n    Parameters\n    ----------\n    directory: str\n        the directory path where files shall be written.\n    basename: str\n        the product basename as returned by\n        :meth:`pyroSAR.drivers.ID.outname_base`\n    \n    Examples\n    --------\n    >>> n = Namespace(directory='/path', basename='S1A__IW___A_20180829T170631')\n    >>> print(n.pix_geo)\n    '-'\n    >>> n.appreciate(['pix_geo'])\n    >>> print(n.pix_geo)\n    '/path/S1A__IW___A_20180829T170631_pix_geo'\n    \"\"\"\n    \n    def __init__(self, directory, basename):\n        self.__base = basename\n        self.__outdir = directory\n        self.__reg = []\n    \n    def __getitem__(self, item):\n        item = str(item).replace('.', '_')\n        return self.get(item)\n    \n    def __getattr__(self, item):\n        # will only be run if object has no attribute item\n        return '-'\n    \n    def appreciate(self, keys):\n        \"\"\"\n\n        Parameters\n        ----------\n        keys: list[str]\n\n        Returns\n        -------\n\n        \"\"\"\n        for key in keys:\n            setattr(self, key.replace('.', '_'), os.path.join(self.__outdir, self.__base + '_' + key))\n            if key not in self.__reg:\n                self.__reg.append(key.replace('.', '_'))\n    \n    def depreciate(self, keys):\n        \"\"\"\n\n        Parameters\n        ----------\n        keys: list[str]\n\n        
Returns\n        -------\n\n        \"\"\"\n        for key in keys:\n            setattr(self, key.replace('.', '_'), '-')\n            if key not in self.__reg:\n                self.__reg.append(key.replace('.', '_'))\n    \n    def getall(self):\n        out = {}\n        for key in self.__reg:\n            out[key] = getattr(self, key)\n        return out\n    \n    def select(self, selection):\n        return [getattr(self, key) for key in selection]\n    \n    def isregistered(self, key):\n        return key in self.__reg\n    \n    def isappreciated(self, key):\n        if self.isregistered(key):\n            if self.get(key) != '-':\n                return True\n        return False\n    \n    def isfile(self, key):\n        return hasattr(self, key) and os.path.isfile(getattr(self, key))\n    \n    def get(self, key):\n        return getattr(self, key)\n\n\ndef par2hdr(parfile, hdrfile, modifications=None, nodata=None):\n    \"\"\"\n    Create an ENVI HDR file from a GAMMA PAR file\n    \n    Parameters\n    ----------\n    parfile: str\n        the GAMMA parfile\n    hdrfile: str\n        the ENVI HDR file\n    modifications: dict or None\n        a dictionary containing value deviations to write to the HDR file\n    nodata: int, float or None\n        a no data value to write to the HDR file via attribute 'data ignore value'\n\n    Returns\n    -------\n    \n    Examples\n    --------\n    >>> from pyroSAR.gamma.auxil import par2hdr\n    >>> par2hdr('dem_seg.par', 'inc.hdr')\n    # write a HDR file for byte data based on a parfile of float data\n    >>> par2hdr('dem_seg.par', 'ls_map.hdr', modifications={'data_type': 1})\n    \n    See Also\n    --------\n    :class:`spatialist.envi.HDRobject`\n    :func:`spatialist.envi.hdr`\n    \"\"\"\n    \n    with ISPPar(parfile) as par:\n        items = par.envidict(nodata)\n        if modifications is not None:\n            items.update(modifications)\n        hdr(items, hdrfile)\n\n\ndef process(\n        cmd: 
list[str],\n        outdir: str | None = None,\n        logfile: str | None = None,\n        logpath: str | None = None,\n        inlist: list[str] | None = None,\n        void: bool = True,\n        shellscript: str | None = None\n) -> tuple[str, str] | None:\n    \"\"\"\n    wrapper function to execute GAMMA commands via module :mod:`subprocess`\n\n    Parameters\n    ----------\n    cmd:\n        The command line arguments.\n    outdir:\n        The directory to execute the command in. This directory is also set\n        as environment variable in `shellscript`.\n    logfile:\n        A file to write the command log to. Overrides parameter `logpath`.\n    logpath:\n        A directory to write logfiles to. The file will be named\n        {GAMMA command}.log, e.g. gc_map.log.\n        Overrides parameter `logfile`.\n    inlist:\n        A list of values, which is passed as interactive inputs via `stdin`.\n    void:\n        Return the `stdout` and `stderr` messages?\n    shellscript:\n        A file to write the GAMMA commands to in shell format.\n\n    Returns\n    -------\n        the stdout and stderr messages if void is False, otherwise None\n    \"\"\"\n    if logfile is not None:\n        log = logfile\n    else:\n        log = os.path.join(logpath, os.path.basename(cmd[0]) + '.log') if logpath else None\n    gamma_home = ExamineGamma().home\n    if shellscript is not None:\n        if not os.path.isfile(shellscript):\n            # create an empty file\n            with open(shellscript, 'w') as init:\n                pass\n        line = ' '.join([str(x) for x in dissolve(cmd)])\n        if inlist is not None:\n            line += ' <<< $\"{}\"'.format('\\n'.join([str(x) for x in inlist]) + '\\n')\n        with open(shellscript, 'r+') as sh:\n            content = sh.read()\n            sh.seek(0)\n            disclaimer = 'This script was created automatically by pyroSAR'\n            is_new = re.search(disclaimer, content) is None\n            if 
is_new:\n                ts = datetime.now().strftime('%a %b %d %H:%M:%S %Y')\n                sh.write(f'# {disclaimer} on {ts}\\n\\n')\n                sh.write('GAMMA_HOME={}\\n\\n'.format(gamma_home))\n                sh.write(content)\n            line = line.replace(gamma_home, '$GAMMA_HOME')\n            if outdir is not None:\n                line = line.replace(outdir, '$OUTDIR')\n                outdirs = re.findall('OUTDIR=(.*)\\n', content)\n                if len(outdirs) == 0 or outdir != outdirs[-1]:\n                    line = f\"OUTDIR={outdir}\\n\\n{line}\"\n            sh.seek(0, 2)  # set pointer to the end of the file\n            sh.write(line + '\\n\\n')\n    \n    # create an environment containing the locations of all GAMMA submodules to be passed to the subprocess calls\n    gammaenv = os.environ.copy()\n    gammaenv['GAMMA_HOME'] = gamma_home\n    returncode, out, err = run([ExamineGamma().gdal_config, '--datadir'], void=False)\n    gammaenv['GDAL_DATA'] = out.strip()\n    for module in ['DIFF', 'DISP', 'IPTA', 'ISP', 'LAT']:\n        loc = os.path.join(gammaenv['GAMMA_HOME'], module)\n        if os.path.isdir(loc):\n            gammaenv[module + '_HOME'] = loc\n            for submodule in ['bin', 'scripts']:\n                subloc = os.path.join(loc, submodule)\n                if os.path.isdir(subloc):\n                    gammaenv['PATH'] += os.pathsep + subloc\n    \n    # execute the command\n    returncode, out, err = run(cmd, outdir=outdir, logfile=log, inlist=inlist,\n                               void=False, errorpass=True, env=gammaenv)\n    gammaErrorHandler(returncode, out, err)\n    if not void:\n        return out, err\n\n\ndef slc_corners(parfile):\n    \"\"\"\n    extract the corner coordinates of a SAR scene\n\n    Parameters\n    ----------\n    parfile: str\n        the GAMMA parameter file to read coordinates from\n\n    Returns\n    -------\n    dict of float\n        a dictionary with keys xmin, xmax, ymin, ymax\n 
   \"\"\"\n    out, err = process(['SLC_corners', parfile], void=False)\n    pts = {}\n    pattern = r'-?[0-9]+\\.[0-9]+'\n    for line in out.split('\\n'):\n        if line.startswith('min. latitude'):\n            pts['ymin'], pts['ymax'] = [float(x) for x in\n                                        re.findall(pattern, line)]\n        elif line.startswith('min. longitude'):\n            pts['xmin'], pts['xmax'] = [float(x) for x in\n                                        re.findall(pattern, line)]\n    return pts\n\n\nclass Spacing(object):\n    \"\"\"\n    compute multilooking factors and pixel spacings from an ISPPar object for a defined ground range target pixel spacing\n\n    Parameters\n    ----------\n    par: str or ISPPar\n        the ISP parameter file\n    spacing: int or float\n        the target pixel spacing in ground range\n    \"\"\"\n    \n    def __init__(self, par, spacing='automatic'):\n        # compute ground range pixel spacing\n        par = par if isinstance(par, ISPPar) else ISPPar(par)\n        self.groundRangePS = par.range_pixel_spacing / (math.sin(math.radians(par.incidence_angle)))\n        # compute initial multilooking factors\n        if spacing == 'automatic':\n            if self.groundRangePS > par.azimuth_pixel_spacing:\n                ratio = self.groundRangePS / par.azimuth_pixel_spacing\n                self.rlks = 1\n                self.azlks = int(round(ratio))\n            else:\n                ratio = par.azimuth_pixel_spacing / self.groundRangePS\n                self.rlks = int(round(ratio))\n                self.azlks = 1\n        else:\n            self.rlks = int(round(float(spacing) / self.groundRangePS))\n            self.azlks = int(round(float(spacing) / par.azimuth_pixel_spacing))\n\n\nclass UTM(object):\n    \"\"\"\n    convert a gamma parameter file corner coordinate from EQA to UTM\n    \n    Parameters\n    ----------\n    parfile: str\n        the GAMMA parameter file to read the coordinate from\n    
\n    Example\n    -------\n    \n    >>> from pyroSAR.gamma import UTM\n    >>> print(UTM('gamma.par').zone)\n    \"\"\"\n    \n    def __init__(self, parfile):\n        par = ISPPar(parfile)\n        inlist = ['WGS84', 1, 'EQA', par.corner_lon, par.corner_lat, '', 'WGS84', 1, 'UTM', '']\n        inlist = map(str, inlist)\n        proc = sp.Popen(['coord_trans'], stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE,\n                        universal_newlines=True, shell=False)\n        out, err = proc.communicate(''.join([x + '\\n' for x in inlist]))\n        out = [x for x in filter(None, out.split('\\n')) if ':' in x]\n        \n        self.meta = dict()\n        for line in out:\n            key, value = re.split(r'\\s*:\\s*', line)\n            value = value.split()\n            value = map(parse_literal, value) if len(value) > 1 else value[0]\n            self.meta[key] = value\n        try:\n            self.zone, self.northing, self.easting, self.altitude = \\\n                self.meta['UTM zone/northing/easting/altitude (m)']\n        except KeyError:\n            self.zone, self.northing, self.easting = \\\n                self.meta['UTM zone/northing/easting (m)']\n"
  },
  {
    "path": "pyroSAR/gamma/dem.py",
    "content": "###############################################################################\n# preparation of DEM data for use in GAMMA\n\n# Copyright (c) 2014-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n################################################################################\n\n\"\"\"\nA collection of functions to handle digital elevation models in GAMMA\n\"\"\"\nfrom urllib.request import urlopen\nimport os\nimport re\nimport shutil\nimport zipfile as zf\n\nfrom spatialist import raster, gdal_translate, gdalbuildvrt, gdalwarp, crsConvert\nfrom spatialist.ancillary import finder\nfrom spatialist.envi import HDRobject\n\nfrom ..auxdata import dem_autoload, dem_create\nfrom ..drivers import ID\nfrom . 
import ISPPar, UTM, slc_corners, par2hdr\nfrom pyroSAR.examine import ExamineGamma\nfrom pyroSAR.ancillary import hasarg\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\ntry:\n    from .api import diff, disp, isp\nexcept ImportError:\n    pass\n\n\ndef fill(dem, dem_out, logpath=None, replace=False):\n    \"\"\"\n    interpolate missing values in the SRTM DEM (value -32768)\n\n    Parameters\n    ----------\n    dem: str\n        the input DEM to be filled\n    dem_out: str\n        the name of the filled DEM\n    logpath: str\n        a directory to write logfiles to\n    replace: bool\n        delete `dem` once finished?\n\n    Returns\n    -------\n\n    \"\"\"\n    width = ISPPar(dem + '.par').width\n    \n    path_dem = os.path.dirname(dem_out)\n    \n    rpl_flg = 0\n    dtype = 4\n    \n    # replace values\n    value = 0\n    new_value = 1\n    disp.replace_values(f_in=dem,\n                        value=value,\n                        new_value=new_value,\n                        f_out=dem + '_temp',\n                        width=width,\n                        rpl_flg=rpl_flg,\n                        dtype=dtype,\n                        logpath=logpath)\n    \n    value = -32768\n    new_value = 0\n    disp.replace_values(f_in=dem + '_temp',\n                        value=value,\n                        new_value=new_value,\n                        f_out=dem + '_temp2',\n                        width=width,\n                        rpl_flg=rpl_flg,\n                        dtype=dtype,\n                        outdir=path_dem,\n                        logpath=logpath)\n    \n    # interpolate missing values\n    isp.interp_ad(data_in=dem + '_temp2',\n                  data_out=dem_out,\n                  width=width,\n                  r_max=9,\n                  np_min=40,\n                  np_max=81,\n                  w_mode=2,\n                  dtype=dtype,\n                  outdir=path_dem,\n                  logpath=logpath)\n    \n   
 # remove temporary files\n    os.remove(dem + '_temp')\n    os.remove(dem + '_temp2')\n    \n    # duplicate parameter file for newly created dem\n    shutil.copy(dem + '.par', dem_out + '.par')\n    \n    # create ENVI header file\n    par2hdr(dem_out + '.par', dem_out + '.hdr')\n    \n    if replace:\n        for item in [dem + x for x in ['', '.par', '.hdr', '.aux.xml'] if os.path.isfile(dem + x)]:\n            os.remove(item)\n\n\ndef transform(infile, outfile, posting=90):\n    \"\"\"\n    transform SRTM DEM from EQA to UTM projection\n    \"\"\"\n    # read DEM parameter file\n    par = ISPPar(infile + '.par')\n    \n    # transform corner coordinate to UTM\n    utm = UTM(infile + '.par')\n    \n    for item in [outfile, outfile + '.par']:\n        if os.path.isfile(item):\n            os.remove(item)\n    \n    # determine false northing from parameter file coordinates\n    falsenorthing = 10000000. if par.corner_lat < 0 else 0\n    \n    # create new DEM parameter file with UTM projection details\n    inlist = ['UTM', 'WGS84', 1, utm.zone, falsenorthing, os.path.basename(outfile), '', '', '', '', '',\n              '-{0} {0}'.format(posting), '']\n    \n    diff.create_dem_par(DEM_par=outfile + '.par',\n                        inlist=inlist)\n    \n    # transform dem\n    diff.dem_trans(DEM1_par=infile + '.par',\n                   DEM1=infile,\n                   DEM2_par=outfile + '.par',\n                   DEM2=outfile,\n                   bflg=1)\n    par2hdr(outfile + '.par', outfile + '.hdr')\n\n\ndef dem_autocreate(geometry, demType, outfile, buffer=None, t_srs=4326, tr=None, logpath=None,\n                   username=None, password=None, geoid_mode='gamma', resampling_method='bilinear'):\n    \"\"\"\n    | automatically create a DEM in GAMMA format for a defined spatial geometry.\n    | The following steps will be performed:\n\n    - collect all tiles overlapping with the geometry using :func:`pyroSAR.auxdata.dem_autoload`\n\n      * if they 
don't yet exist locally they will automatically be downloaded\n      * the tiles will be downloaded into the SNAP auxdata directory structure,\n        e.g. ``$HOME/.snap/auxdata/dem/SRTM 3Sec``\n\n    - create a mosaic GeoTIFF of the same spatial extent as the input geometry\n      plus a defined buffer using :func:`pyroSAR.auxdata.dem_create`\n    \n    - if necessary, subtract the geoid-ellipsoid difference (see :func:`pyroSAR.auxdata.dem_autoload`\n      for height references of different supported DEMs)\n    \n    - convert the result to GAMMA format\n    \n      * If ``t_srs`` is `4326` and the DEM's height reference is either `WGS84` ellipsoid or `EGM96` geoid,\n        the command ``srtm2dem`` can be used. This is kept for backwards compatibility.\n      * For all other cases the newer command ``dem_import`` can be used if it exists and if the command\n        ``create_dem_par`` accepts a parameter `EPSG`.\n\n    Parameters\n    ----------\n    geometry: spatialist.vector.Vector\n        a vector geometry delimiting the output DEM size\n    demType: str\n        the type of DEM to be used; see :func:`~pyroSAR.auxdata.dem_autoload` for options\n    outfile: str\n        the name of the final DEM file\n    buffer: float or None\n        a buffer in degrees to create around the geometry\n    t_srs: int, str or osgeo.osr.SpatialReference\n        A target geographic reference system in WKT, EPSG, PROJ4 or OPENGIS format.\n        See function :func:`spatialist.auxil.crsConvert()` for details.\n        Default: `4326 <https://spatialreference.org/ref/epsg/4326/>`_.\n    tr: tuple or None\n        the target resolution as (xres, yres) in units of ``t_srs``; if ``t_srs`` is kept at its default value of 4326,\n        ``tr`` does not need to be defined and the original resolution is preserved;\n        in all other cases the default of None is rejected\n    logpath: str\n        a directory to write GAMMA logfiles to\n    username: str or None\n        (optional) 
the user name for services requiring registration;\n        see :func:`~pyroSAR.auxdata.dem_autoload`\n    password: str or None\n        (optional) the password for the registration account\n    geoid_mode: str\n        the software to be used for converting geoid to ellipsoid heights (if necessary); options:\n        \n         - 'gamma'\n         - 'gdal'\n    resampling_method: str\n        the gdalwarp resampling method; See `here <https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r>`_\n        for options.\n\n    Returns\n    -------\n\n    \"\"\"\n    geometry = geometry.clone()\n    \n    epsg = crsConvert(t_srs, 'epsg') if t_srs != 4326 else t_srs\n    \n    if epsg != 4326:\n        if not hasarg(diff.create_dem_par, 'EPSG'):\n            raise RuntimeError('using a different CRS than 4326 is currently '\n                               'not supported for this version of GAMMA')\n        if 'dem_import' not in dir(diff):\n            raise RuntimeError('using a different CRS than 4326 currently requires command '\n                               'dem_import, which is not part of this version of GAMMA')\n        if tr is None:\n            raise RuntimeError('tr needs to be defined if t_srs is not 4326')\n    \n    if os.path.isfile(outfile):\n        log.info('outfile already exists')\n        return\n    \n    tmpdir = outfile + '__tmp'\n    os.makedirs(tmpdir)\n    \n    try:\n        if logpath is not None and not os.path.isdir(logpath):\n            os.makedirs(logpath)\n        \n        vrt = os.path.join(tmpdir, 'dem.vrt')\n        dem = os.path.join(tmpdir, 'dem.tif')\n        \n        if epsg == geometry.getProjection('epsg') and buffer is None:\n            ext = geometry.extent\n            bounds = [ext['xmin'], ext['ymin'], ext['xmax'], ext['ymax']]\n        else:\n            bounds = None\n        geometry.reproject(4326)\n        log.info('collecting DEM tiles')\n        dem_autoload([geometry], demType, vrt=vrt, 
username=username,\n                     password=password, buffer=buffer)\n        \n        # TanDEM-X DEM, GETASSE30 DEM: ellipsoidal heights,\n        # Copernicus DEM: EGM2008 geoid, all others are EGM96 heights\n        # GAMMA works only with ellipsoid heights and the offset needs to be corrected\n        # starting from GDAL 2.2 the conversion can be done directly in GDAL; see docs of gdalwarp\n        message = 'conversion to GAMMA format'\n        geoid = None\n        if demType not in ['TDX90m', 'GETASSE30']:\n            message = 'geoid correction and conversion to GAMMA format'\n            if re.search('Copernicus [139]0m', demType):\n                geoid = 'EGM2008'\n            elif demType in ['AW3D30', 'SRTM 1Sec HGT', 'SRTM 3Sec']:\n                geoid = 'EGM96'\n            else:\n                raise RuntimeError(\"'demType' is not supported\")\n        \n        if geoid_mode == 'gdal':\n            gamma_geoid = None\n            if geoid is not None:\n                gdal_geoid = True\n            else:\n                gdal_geoid = False\n        elif geoid_mode == 'gamma':\n            gdal_geoid = False\n            gamma_geoid = geoid\n        else:\n            raise RuntimeError(\"'geoid_mode' is not supported\")\n        \n        dem_create(vrt, dem, t_srs=epsg, tr=tr, geoid_convert=gdal_geoid,\n                   resampleAlg=resampling_method, outputBounds=bounds,\n                   geoid=geoid)\n        \n        outfile_tmp = os.path.join(tmpdir, os.path.basename(outfile))\n        \n        log.info(message)\n        \n        dem_import(src=dem, dst=outfile_tmp, geoid=gamma_geoid,\n                   logpath=logpath, outdir=tmpdir)\n        \n        for suffix in ['', '.par', '.hdr']:\n            shutil.copyfile(outfile_tmp + suffix, outfile + suffix)\n    \n    except RuntimeError as e:\n        raise e\n    finally:\n        shutil.rmtree(tmpdir)\n\n\ndef dem_import(\n        src: str,\n        dst: str,\n        
geoid: str | None = None,\n        logpath: str | None = None,\n        outdir: str | None = None,\n        shellscript: str | None = None\n) -> None:\n    \"\"\"\n    convert an existing DEM in GDAL-readable format to GAMMA\n    format including optional geoid-ellipsoid conversion.\n    \n    Parameters\n    ----------\n    src:\n        the input DEM\n    dst:\n        the output DEM\n    geoid:\n        the geoid height reference of `src`; supported options:\n        \n        - 'EGM96'\n        - 'EGM2008'\n        - None: assume WGS84 ellipsoid heights and do not convert heights\n    logpath:\n        a directory to write logfiles to\n    outdir:\n        the directory to execute the command in\n    shellscript:\n        a file to write the GAMMA commands to in shell format\n    \"\"\"\n    with raster.Raster(src) as ras:\n        epsg = ras.epsg\n    if epsg != 4326:\n        if not hasarg(diff.create_dem_par, 'EPSG'):\n            raise RuntimeError('using a different CRS than EPSG:4326 is currently '\n                               'not supported for this version of GAMMA')\n        if 'dem_import' not in dir(diff):\n            raise RuntimeError('using a different CRS than 4326 currently requires command '\n                               'dem_import, which is not part of this version of GAMMA')\n    dst_base = os.path.splitext(dst)[0]\n    if geoid is not None:\n        # \"Add interpolated geoid offset relative to the WGS84 datum;\n        # NODATA are set to the interpolated geoid offset.\"\n        gflg = 2\n    else:\n        # \"No geoid offset correction, replace NODATA with a valid near-zero value.\"\n        gflg = 0\n    if epsg == 4326 and geoid == 'EGM96':\n        # old approach for backwards compatibility\n        diff.srtm2dem(SRTM_DEM=src,\n                      DEM=dst,\n                      DEM_par=dst + '.par',\n                      gflg=gflg,\n                      geoid='-',\n                      logpath=logpath,\n                   
   outdir=outdir,\n                      shellscript=shellscript)\n    else:\n        # new approach enabling an arbitrary target CRS EPSG code\n        diff.create_dem_par(DEM_par=dst_base + '.par',\n                            inlist=[''] * 9,\n                            EPSG=epsg,\n                            logpath=logpath,\n                            outdir=outdir,\n                            shellscript=shellscript)\n        dem_import_pars = {'input_DEM': src,\n                           'DEM': dst,\n                           'DEM_par': dst_base + '.par',\n                           'logpath': logpath,\n                           'outdir': outdir,\n                           'shellscript': shellscript}\n        if gflg == 2:\n            home = ExamineGamma().home\n            if geoid == 'EGM96':\n                geoid_file = os.path.join(home, 'DIFF', 'scripts', 'egm96.dem')\n            elif geoid == 'EGM2008':\n                geoid_file = os.path.join(home, 'DIFF', 'scripts', 'egm2008-5.dem')\n            else:\n                raise RuntimeError(f\"conversion of '{geoid}' geoid is not supported by GAMMA\")\n            dem_import_pars['geoid'] = geoid_file\n            dem_import_pars['geoid_par'] = geoid_file + '_par'\n        \n        diff.dem_import(**dem_import_pars)\n    \n    par2hdr(dst_base + '.par', dst_base + '.hdr', nodata=0)\n\n\ndef dempar(dem, logpath=None):\n    \"\"\"\n    create GAMMA parameter text files for DEM files\n\n    currently only EQA and UTM projections with WGS84 ellipsoid are supported\n\n    Parameters\n    ----------\n    dem: str\n        the name of the DEM\n    logpath: str\n        a directory to write logfiles to\n\n    Returns\n    -------\n\n    \"\"\"\n    rast = raster.Raster(dem)\n    \n    # determine data type\n    dtypes = {'Int16': 'INTEGER*2', 'UInt16': 'INTEGER*2', 'Float32': 'REAL*4'}\n    if rast.dtype not in dtypes:\n        raise IOError('data type not supported')\n    else:\n        dtype = 
dtypes[rast.dtype]\n    \n    # format pixel posting and top left coordinate\n    posting = str(rast.geo['yres']) + ' ' + str(rast.geo['xres'])\n    latlon = str(rast.geo['ymax']) + ' ' + str(rast.geo['xmin'])\n    \n    # evaluate projection\n    projections = {'longlat': 'EQA', 'utm': 'UTM'}\n    if rast.proj4args['proj'] not in projections:\n        raise ValueError('projection not supported (yet)')\n    else:\n        projection = projections[rast.proj4args['proj']]\n    \n    # get ellipsoid\n    ellipsoid = rast.proj4args['ellps'] if 'ellps' in rast.proj4args else rast.proj4args['datum']\n    if ellipsoid != 'WGS84':\n        raise ValueError('ellipsoid not supported (yet)')\n    \n    # create list for GAMMA command input\n    if projection == 'UTM':\n        zone = rast.proj4args['zone']\n        falsenorthing = 10000000. if rast.geo['ymin'] < 0 else 0\n        parlist = [projection, ellipsoid, 1, zone, falsenorthing, os.path.basename(dem),\n                   dtype, 0, 1, rast.cols, rast.rows, posting, latlon]\n    else:\n        parlist = [projection, ellipsoid, 1, os.path.basename(dem), dtype,\n                   0, 1, rast.cols, rast.rows, posting, latlon]\n    \n    # execute GAMMA command\n    diff.create_dem_par(DEM_par=os.path.splitext(dem)[0] + '.par',\n                        inlist=parlist,\n                        outdir=os.path.dirname(dem),\n                        logpath=logpath)\n\n\ndef swap(data, outname):\n    \"\"\"\n    byte swapping from small to big endian (as required by GAMMA)\n\n    Parameters\n    ----------\n    data: str\n        the DEM file to be swapped\n    outname: str\n        the name of the file to write\n\n    Returns\n    -------\n\n    \"\"\"\n    with raster.Raster(data) as ras:\n        dtype = ras.dtype\n        ras_format = ras.format\n    if ras_format != 'ENVI':\n        raise IOError('only ENVI format supported')\n    dtype_lookup = {'Int16': 2, 'CInt16': 2, 'Int32': 4, 'Float32': 4, 'CFloat32': 4, 'Float64': 
8}\n    if dtype not in dtype_lookup:\n        raise IOError('data type {} not supported'.format(dtype))\n    \n    disp.swap_bytes(infile=data,\n                    outfile=outname,\n                    swap_type=dtype_lookup[dtype])\n    \n    with HDRobject(data + '.hdr') as header:\n        header.byte_order = 1\n        header.write(outname + '.hdr')\n\n\ndef mosaic(demlist, outname, byteorder=1, gammapar=True):\n    \"\"\"\n    mosaicing of multiple DEMs\n\n    Parameters\n    ----------\n    demlist: list[str]\n        a list of DEM names to be mosaiced\n    outname: str\n        the name of the final mosaic file\n    byteorder: {0, 1}\n        the byte order of the mosaic\n\n        - 0: small endian\n        - 1: big endian\n\n    gammapar: bool\n        create a GAMMA parameter file for the mosaic?\n\n    Returns\n    -------\n\n    \"\"\"\n    if len(demlist) < 2:\n        raise IOError('length of demlist < 2')\n    with raster.Raster(demlist[0]) as ras:\n        nodata = ras.nodata\n    \n    par = {'format': 'ENVI',\n           'srcNodata': nodata, 'dstNodata': nodata,\n           'options': ['-q']}\n    gdalwarp(src=demlist, dst=outname, **par)\n    \n    if byteorder == 1:\n        swap(outname, outname + '_swap')\n        for item in [outname, outname + '.hdr', outname + '.aux.xml']:\n            os.remove(item)\n        os.rename(outname + '_swap', outname)\n        os.rename(outname + '_swap.hdr', outname + '.hdr')\n    if gammapar:\n        dempar(outname)\n\n\ndef hgt(parfiles):\n    \"\"\"\n    concatenate hgt file names overlapping with multiple SAR scenes\n\n    - this list is read for corner coordinates of which the next integer\n      lower left latitude and longitude is computed\n    - hgt files are supplied in 1 degree equiangular format named e.g.\n      N16W094.hgt (with pattern [NS][0-9]{2}[EW][0-9]{3}.hgt)\n    - For north and east hemisphere the respective absolute latitude and longitude\n      values are smaller than the lower left 
coordinate of the SAR image\n    - west and south coordinates are negative and hence the nearest lower left\n      integer absolute value is going to be larger\n\n    Parameters\n    ----------\n    parfiles: list of str or pyroSAR.ID\n        a list of GAMMA parameter files or pyroSAR ID objects\n\n    Returns\n    -------\n    list\n        the names of hgt files overlapping with the supplied parameter files/objects\n    \"\"\"\n    \n    lat = []\n    lon = []\n    for parfile in parfiles:\n        if isinstance(parfile, ID):\n            corners = parfile.getCorners()\n        elif parfile.endswith('.par'):\n            corners = slc_corners(parfile)\n        else:\n            raise RuntimeError('parfiles items must be of type pyroSAR.ID or GAMMA parfiles with suffix .par')\n        lat += [int(float(corners[x]) // 1) for x in ['ymin', 'ymax']]\n        lon += [int(float(corners[x]) // 1) for x in ['xmin', 'xmax']]\n    \n    # add missing lat/lon values (and add an extra buffer of one degree)\n    lat = range(min(lat), max(lat) + 1)\n    lon = range(min(lon), max(lon) + 1)\n    \n    # convert coordinates to string with leading zeros and hemisphere identification letter\n    lat = [str(x).zfill(2 + len(str(x)) - len(str(x).strip('-'))) for x in lat]\n    lat = [x.replace('-', 'S') if '-' in x else 'N' + x for x in lat]\n    \n    lon = [str(x).zfill(3 + len(str(x)) - len(str(x).strip('-'))) for x in lon]\n    lon = [x.replace('-', 'W') if '-' in x else 'E' + x for x in lon]\n    \n    # concatenate all formatted latitudes and longitudes with each other as final product\n    return [x + y + '.hgt' for x in lat for y in lon]\n\n\ndef makeSRTM(scenes, srtmdir, outname):\n    \"\"\"\n    Create a DEM in GAMMA format from SRTM tiles\n\n    - coordinates are read to determine the required DEM extent and select the necessary hgt tiles\n    - mosaics SRTM DEM tiles, converts them to GAMMA format and subtracts offset to WGS84 ellipsoid\n\n    intended for SRTM 
products downloaded from:\n\n    - USGS: https://gdex.cr.usgs.gov/gdex/\n    - CGIAR: https://srtm.csi.cgiar.org\n\n    Parameters\n    ----------\n    scenes: list of str or pyroSAR.ID\n        a list of Gamma parameter files or pyroSAR ID objects to read the DEM extent from\n    srtmdir: str\n        a directory containing the SRTM hgt tiles\n    outname: str\n        the name of the final DEM file\n\n    Returns\n    -------\n\n    \"\"\"\n    \n    tempdir = outname + '___temp'\n    os.makedirs(tempdir)\n    \n    hgt_options = hgt(scenes)\n    \n    hgt_files = finder(srtmdir, hgt_options)\n    \n    nodatas = list(set([raster.Raster(x).nodata for x in hgt_files]))\n    if len(nodatas) == 1:\n        nodata = nodatas[0]\n    else:\n        raise RuntimeError('different nodata values are not permitted')\n    \n    srtm_vrt = os.path.join(tempdir, 'srtm.vrt')\n    srtm_temp = srtm_vrt.replace('.vrt', '_tmp')\n    srtm_final = srtm_vrt.replace('.vrt', '')\n    \n    gdalbuildvrt(src=hgt_files, dst=srtm_vrt, srcNodata=nodata, options=['-overwrite'])\n    \n    gdal_translate(src=srtm_vrt, dst=srtm_temp, format='ENVI', noData=nodata)\n    \n    diff.srtm2dem(SRTM_DEM=srtm_temp,\n                  DEM=srtm_final,\n                  DEM_par=srtm_final + '.par',\n                  gflg=2,\n                  geoid='-',\n                  outdir=tempdir)\n    \n    shutil.move(srtm_final, outname)\n    shutil.move(srtm_final + '.par', outname + '.par')\n    par2hdr(outname + '.par', outname + '.hdr')\n    \n    shutil.rmtree(tempdir)\n\n\ndef hgt_collect(parfiles, outdir, demdir=None, arcsec=3):\n    \"\"\"\n    automatic downloading and unpacking of srtm tiles\n\n    Parameters\n    ----------\n    parfiles: list of str or pyroSAR.ID\n        a list of Gamma parameter files or pyroSAR ID objects\n    outdir: str\n        a target directory to download the tiles to\n    demdir: str or None\n        an additional directory already containing hgt tiles\n    arcsec: {1, 
3}\n        the spatial resolution to be used\n\n    Returns\n    -------\n    list\n        the names of all local hgt tiles overlapping with the parfiles\n    \"\"\"\n    \n    # concatenate required hgt tile names\n    target_ids = hgt(parfiles)\n    \n    targets = []\n    \n    pattern = '[NS][0-9]{2}[EW][0-9]{3}'\n    \n    # if an additional dem directory has been defined, check this directory for required hgt tiles\n    if demdir is not None:\n        targets.extend(finder(demdir, target_ids))\n    \n    # check for additional potentially existing hgt tiles in the defined output directory\n    extras = [os.path.join(outdir, x) for x in target_ids if\n              os.path.isfile(os.path.join(outdir, x)) and not re.search(x, '\\n'.join(targets))]\n    targets.extend(extras)\n    \n    log.info('found {} relevant SRTM tiles...'.format(len(targets)))\n    \n    # search server for all required tiles, which were not found in the local directories\n    if len(targets) < len(target_ids):\n        log.info('searching for additional SRTM tiles on the server...')\n        onlines = []\n        \n        if arcsec == 1:\n            remotes = ['http://e4ftl01.cr.usgs.gov/SRTM/SRTMGL1.003/2000.02.11/']\n            remotepattern = pattern + '.SRTMGL1.hgt.zip'\n        elif arcsec == 3:\n            server = 'https://dds.cr.usgs.gov/srtm/version2_1/SRTM3/'\n            remotes = [os.path.join(server, x) for x in\n                       ['Africa', 'Australia', 'Eurasia', 'Islands', 'North_America', 'South_America']]\n            remotepattern = pattern + '[.]hgt.zip'\n        else:\n            raise ValueError('argument arcsec must be of value 1 or 3')\n        \n        for remote in remotes:\n            response = urlopen(remote).read()\n            items = sorted(set(re.findall(remotepattern, response)))\n            for item in items:\n                outname = re.findall(pattern, item)[0] + '.hgt'\n                if outname in target_ids and outname not in 
[os.path.basename(x) for x in targets]:\n                    onlines.append(os.path.join(remote, item))\n        \n        # if additional tiles have been found online, download and unzip them to the local directory\n        if len(onlines) > 0:\n            log.info('downloading {} SRTM tiles...'.format(len(onlines)))\n            for candidate in onlines:\n                localname = os.path.join(outdir, re.findall(pattern, candidate)[0] + '.hgt')\n                infile = urlopen(candidate)\n                with open(localname + '.zip', 'wb') as outfile:\n                    outfile.write(infile.read())\n                infile.close()\n                with zf.ZipFile(localname + '.zip', 'r') as z:\n                    z.extractall(outdir)\n                os.remove(localname + '.zip')\n                targets.append(localname)\n    return targets\n"
  },
  {
    "path": "pyroSAR/gamma/error.py",
    "content": "###############################################################################\n# interface for translating GAMMA error messages into Python error types\n\n# Copyright (c) 2015-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\nimport re\nimport signal\n\n\ndef gammaErrorHandler(returncode: int, out: str, err: str) -> None:\n    \"\"\"\n    Function to raise errors in Python. This function is not intended\n    for direct use but as part of function :func:`pyroSAR.gamma.auxil.process`.\n    \n    Parameters\n    ----------\n    returncode:\n        the subprocess return code\n    out:\n        the stdout message returned by a subprocess call of a gamma command\n    err:\n        the stderr message returned by a subprocess call of a gamma command\n\n    Raises: IOError | ValueError | RuntimeError\n\n    \"\"\"\n    \n    # scan stdout and stderr messages for lines starting with 'ERROR'\n    messages = out.split('\\n') if out else []\n    messages.extend(err.strip().split('\\n'))\n    errormessages = [x for x in messages if x.startswith('ERROR')]\n    \n    # registry of known gamma error messages and corresponding Python error types\n    # do not change the Python error types of specific messages! 
This will change the behavior of several functions\n    # in case no error is to be thrown define None as error type\n    knownErrors = {'image data formats differ': IOError,\n                   'cannot open': IOError,\n                   r'no coverage of SAR image by DEM(?: \\(in (?:latitude/northing|longitude/easting)\\)|)': RuntimeError,\n                   'libgdal.so.1: no version information available': None,\n                   'line outside of image': ValueError,\n                   'no offsets found above SNR threshold': ValueError,\n                   'window size < 4': ValueError,\n                   'MLI oversampling factor must be 1, 2, 4, 8': ValueError,\n                   'no points available for determining average intensity': ValueError,\n                   'p_interp(): time outside of range': RuntimeError,\n                   'no overlap with lookup table': RuntimeError,\n                   'insufficient offset points to determine offset model parameters': RuntimeError,\n                   'insufficient offset points left after culling to determine offset model parameters': RuntimeError,\n                   'calloc_1d: number of elements <= 0': ValueError,\n                   'multi-look output line:': RuntimeError,\n                   'no OPOD state vector found with the required start time!': RuntimeError,\n                   'gc_map operates only with slant range geometry, image geometry in SLC_par: GROUND_RANGE': RuntimeError,\n                   'OPOD state vector data ends before start of the state vector time window': RuntimeError,\n                   'non-zero exit status': RuntimeError,\n                   'unsupported DEM projection': RuntimeError,\n                   'tiffWriteProc:No space left on device': RuntimeError,\n                   'in subroutine julday: there is no year zero!': RuntimeError,\n                   'cannot create ISP image parameter file': OSError}\n    \n    # check if the error message is known and throw the 
mapped error from knownErrors accordingly.\n    # Otherwise raise a RuntimeError if killed by a signal and a GammaUnknownError in all other cases.\n    # The actual message is passed to the error and thus visible for backtracing.\n    if returncode != 0:\n        if len(errormessages) > 0:\n            errormessage = errormessages[-1]\n            err_out = '\\n\\n'.join([re.sub('ERROR[: ]*', '', x) for x in errormessages])\n            for error in knownErrors:\n                if re.search(error, errormessage):\n                    errortype = knownErrors[error]\n                    if errortype:\n                        raise errortype(err_out)\n                    else:\n                        return\n        else:\n            err_out = f'{err}\\nfailed with return code {returncode}'\n            if returncode < 0:\n                # handle signal kills like SIGSEGV (segmentation fault)\n                sig = signal.Signals(-returncode)\n                raise RuntimeError(err_out + f' ({sig.name})')\n        raise GammaUnknownError(err_out)\n\n\nclass GammaUnknownError(Exception):\n    \"\"\"\n    This is a general error, which is raised if the error message is not yet integrated\n    into the known errors of function :func:`gammaErrorHandler`.\n    If this error occurs, the message should be included in this function.\n    \"\"\"\n    \n    def __init__(self, errormessage):\n        Exception.__init__(self, errormessage)\n"
  },
  {
    "path": "pyroSAR/gamma/parser.py",
    "content": "###############################################################################\n# parse GAMMA command docstrings to Python functions\n\n# Copyright (c) 2015-2025, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nimport os\nimport re\nfrom shutil import which\nimport subprocess as sp\nfrom collections import Counter\nfrom spatialist.ancillary import finder, dissolve\n\nfrom pyroSAR.examine import ExamineGamma\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef parse_command(command, indent='    '):\n    \"\"\"\n    Parse the help text of a GAMMA command to a Python function including a docstring.\n    The docstring is in rst format and can thus be parsed by e.g. 
sphinx.\n    This function is not intended to be used by itself, but rather within function :func:`parse_module`.\n\n    Parameters\n    ----------\n    command: str\n        the name of the gamma command\n    indent: str\n        the Python function indentation string; default: four spaces\n\n    Returns\n    -------\n    str\n        the full Python function text\n\n    \"\"\"\n    # run the command without passing arguments to just catch its usage description\n    command = which(command)\n    if command is None:\n        raise OSError('command does not exist')\n    command_base = os.path.basename(command)\n    proc = sp.Popen(command, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE, universal_newlines=True)\n    out, err = proc.communicate()\n    # sometimes the description string is split between stdout and stderr\n    # for the following commands stderr contains the usage description line, which is inserted into stdout\n    if command_base in ['ras_pt', 'ras_data_pt', 'rasdt_cmap_pt']:\n        out = out.replace(' ***\\n ', ' ***\\n ' + err)\n    else:\n        # for all other commands stderr is just appended to stdout\n        out += err\n    \n    # raise a warning when the command has been deprecated\n    # extract all lines starting and ending with three asterisks\n    matches = re.findall(r'^\\*{3}\\s*(.*?)\\s*\\*{3}$', out, re.MULTILINE)\n    if matches:\n        # join the lines and search for a deprecation message\n        cleaned = ' '.join(matches)\n        pattern = (r'([\\w\\.]+ (?:has been|was) re(?:named to|placed(?: that [ \\*\\n]*|) by)'\n                   r'(?:[ \\*\\n]*|)(?: the ISP program|) [\\w\\.]+)')\n        match = re.search(pattern, cleaned)\n        if match:\n            raise DeprecationWarning(match.group())\n    \n    if re.search(r\"Can't locate FILE/Path\\.pm in @INC\", out):\n        raise RuntimeError('unable to parse Perl script')\n    ###########################################\n    # fix command-specific inconsistencies in 
parameter naming\n    # in several commands the parameter naming in the usage description line does not match that of the docstring\n    parnames_lookup = {'2PASS_INT': [('OFF_PAR', 'OFF_par')],\n                       'adapt_filt': [('low_snr_thr', 'low_SNR_thr')],\n                       'atm_mod2': [('rpt', 'report'),\n                                    ('[mode]', '[model_atm]'),\n                                    ('[model]', '[model_atm]'),\n                                    ('model     atm', 'model_atm atm'),\n                                    ],\n                       'atm_mod_2d': [('xref', 'rref'),\n                                      ('yref', 'azref')],\n                       'atm_mod_2d_pt': [('[sigma_min]', '[sigma_max]')],\n                       'base_calc': [('plt_flg', 'plt_flag'),\n                                     ('pltflg', 'plt_flag')],\n                       'base_init': [('<base>', '<baseline>')],\n                       'base_plot': [('plt_flg', 'plt_flag'),\n                                     ('pltflg', 'plt_flag')],\n                       'cc_monitoring': [('...', '<...>')],\n                       'cct_sp_pt': [('pcct_sp_pt', 'pcct_sp')],\n                       'comb_interfs': [('combi_out', 'combi_int')],\n                       'coord_to_sarpix': [('north/lat', 'north_lat'),\n                                           ('east/lon', 'east_lon'),\n                                           ('SLC_par', '<SLC_MLI_par>'),\n                                           ('SLC/MLI_par', 'SLC_MLI_par')],\n                       'data2geotiff': [('nodata', 'no_data')],\n                       'def_mod': [('<def>', '<def_rate>'),\n                                   ('def         (output)', 'def_rate    (output)')],\n                       'dis2hgt': [('m/cycle', 'm_cycle')],\n                       'discc': [('min_corr', 'cmin'),\n                                 ('max_corr', 'cmax')],\n                       'disp2ras': [('<list>', 
'<DISP_tab>')],\n                       'dis_data': [('...', '<...>')],\n                       'dispwr': [('data_type', 'dtype')],\n                       'DORIS_vec': [('SLC_PAR', 'SLC_par')],\n                       'gc_map_fd': [('fdtab', 'fd_tab')],\n                       'gc_map_grd': [('<MLI_par>', '<GRD_par>')],\n                       'geocode_back': [('<gc_map>', '<lookup_table>'),\n                                        ('\\n  gc_map ', '\\n  lookup_table ')],\n                       'GRD_to_SR': [('SLC_par', 'MLI_par')],\n                       'haalpha': [('<alpha> <entropy>', '<alpha2> <entropy>'),\n                                   ('alpha       (output)', 'alpha2      (output)')],\n                       'histogram_ras': [('mean/stdev', 'mean_stdev')],\n                       'hsi_color_scale': [('[chip]', '[chip_width]')],\n                       'HUYNEN_DEC': [('T11_0', 'T11'),\n                                      ('<T12> <T13> <T11>', '<T11> <T12> <T13>'),\n                                      ('HUYNEN_DEC:', '***')],\n                       'interf_SLC': [('  SLC2_pa  ', '  SLC2_par  ')],\n                       'ionosphere_mitigation': [('<SLC1> <ID1>', '<ID1>')],\n                       'landsat2dem': [('<DEM>', '<image>')],\n                       'line_interp': [('input file', 'data_in'),\n                                       ('output file', 'data_out')],\n                       'm-alpha': [('<c2 ', '<c2> ')],\n                       'm-chi': [('<c2 ', '<c2> ')],\n                       'm-delta': [('<c2 ', '<c2> ')],\n                       'map_section': [('n1', 'north1'),\n                                       ('e1', 'east1'),\n                                       ('n2', 'north2'),\n                                       ('e2', 'east2'),\n                                       ('[coord]', '[coords]')],\n                       'mask_class': [('...', '<...>')],\n                       'mcf_pt': [('<azlks>', '[azlks]'),\n          
                        ('<rlks>', '[rlks]')],\n                       'mk_2d_im_geo': [('exponent', 'exp')],\n                       'mk_adf2_2d': [('[alpha_max [', '[alpha_max] ['),\n                                      ('-m MLI_dir', 'mli_dir'),\n                                      ('-s scale', 'scale'),\n                                      ('-e exp', 'exponent'),\n                                      ('-u', 'update'),\n                                      ('-D', 'dem_par')],\n                       'mk_base_calc': [('<RSLC_tab>', '<SLC_tab>')],\n                       'mk_cpd_all': [('dtab', 'data_tab')],\n                       'mk_cpx_ref_2d': [('diff_tab', 'cpx_tab')],\n                       'mk_diff_tc_2d': [('<def>', '<def_rate>'),\n                                         ('def       (input)', 'def_rate  (input)')],\n                       'mk_dispmap2_2d': [('RMLI_image', 'MLI'),\n                                          ('RMLI_par', 'MLI_par'),\n                                          ('MLI_image', 'MLI'),\n                                          ('DISP_tab', 'disp_tab')],\n                       'mk_dispmap_2d': [('RMLI_image', 'MLI'),\n                                         ('RMLI_par', 'MLI_par'),\n                                         ('MLI_image', 'MLI'),\n                                         ('DISP_tab', 'disp_tab')],\n                       'mk_geo_data_all': [('data_geo_dir', 'geo_dir')],\n                       'mk_itab': [('<offset>', '<start>')],\n                       'mk_hgt_2d': [('m/cycle', 'm_cycle')],\n                       'mk_pol2rec_2d': [('data_tab', 'DIFF_tab'),\n                                         ('<type> <rmli>', '<dtype>'),\n                                         ('<dtype> <rmli>', '<dtype>'),\n                                         ('type           input', 'dtype          input'),\n                                         ('\\n    Options:\\n', ''),\n                                         
('-s scale', 'scale'),\n                                         ('-e exp', 'exponent'),\n                                         ('-a min', 'min'),\n                                         ('-b max', 'max'),\n                                         ('-R rmax', 'rmax'),\n                                         ('-m mode', 'mode'),\n                                         ('-u', 'update')],\n                       'mk_rasdt_all': [('RMLI_image', 'MLI'),\n                                        ('MLI_image', 'MLI')],\n                       'mk_rasmph_all': [('RMLI_image', 'MLI'),\n                                         ('MLI_image', 'MLI')],\n                       'mk_tab2': [('--linenumber', 'linenumber')],\n                       'mk_unw_2d': [('unw_mask1', 'unw_mask')],\n                       'mk_unw_ref_2d': [('diff_tab', 'DIFF_tab')],\n                       'MLI2pt': [('MLI_TAB', 'MLI_tab'),\n                                  ('pSLC_par', 'pMLI_par')],\n                       'mosaic': [('<..>', '<...>'),\n                                  ('DEM_parout', 'DEM_par_out')],\n                       'multi_class_mapping': [('...', '<...>')],\n                       'multi_def': [('<def>', '<def_rate>'),\n                                     ('def             (output)', 'def_rate        (output)')],\n                       'multi_look_geo': [('geo_SLC', 'SLC'),\n                                          ('SLC/MLI', ('SLC_MLI'))],\n                       'multi_look_MLI': [('MLI in_par', 'MLI_in_par')],\n                       'offset_fit': [('interact_flag', 'interact_mode')],\n                       'offset_plot_az': [('rmin', 'r_min'),\n                                          ('rmax', 'r_max')],\n                       'par_ASF_SLC': [('CEOS_SAR_leader', 'CEOS_leader')],\n                       'par_ASAR': [('ASAR/ERS_file', 'ASAR_ERS_file')],\n                       'par_EORC_JERS_SLC': [('slc', 'SLC')],\n                       'par_ERSDAC_PALSAR': 
[('VEXCEL_SLC_par', 'ERSDAC_SLC_par')],\n                       'par_ESA_JERS_SEASAT_SLC': [('[slc]', '[SLC]')],\n                       'par_ICEYE_GRD': [('<GeoTIFF>', '<GeoTIFF> <XML>'),\n                                         ('[mli]', '[MLI]')],\n                       'par_ICEYE_SLC': [('[slc]', '[SLC]')],\n                       'par_MSP': [('SLC/MLI_par', 'SLC_MLI_par')],\n                       'par_SIRC': [('UTC/MET', 'UTC_MET')],\n                       'par_TX_GRD': [('COSAR', 'GeoTIFF')],\n                       'par_UAVSAR_SLC': [('SLC/MLC_in', 'SLC_MLC_in'),\n                                          ('SLC/MLI_par', 'SLC_MLI_par'),\n                                          ('SLC/MLI_out', 'SLC_MLI_out')],\n                       'par_UAVSAR_geo': [('SLC/MLI_par', 'SLC_MLI_par')],\n                       'phase_sim': [('sim       (', 'sim_unw   (')],\n                       'product': [('wgt_flg', 'wgt_flag')],\n                       'radcal_MLI': [('MLI_PAR', 'MLI_par')],\n                       'radcal_PRI': [('GRD_PAR', 'GRD_par'),\n                                      ('PRI_PAR', 'PRI_par')],\n                       'radcal_SLC': [('SLC_PAR', 'SLC_par')],\n                       'ras2jpg': [('{', '{{'),\n                                   ('}', '}}')],\n                       'ras_data_pt': [('pdata1', 'pdata')],\n                       'ras_to_rgb': [('red channel', 'red_channel'),\n                                      ('green channel', 'green_channel'),\n                                      ('blue channel', 'blue_channel')],\n                       'rascc_mask_thinning': [('...', '[...]')],\n                       'rashgt': [('m/cycle', 'm_cycle')],\n                       'rashgt_shd': [('m/cycle', 'm_cycle'),\n                                      ('\\n  cycle ', '\\n  m_cycle ')],\n                       'rasdt_cmap_pt': [('pdata1', 'pdata')],\n                       'raspwr': [('hdrz', 'hdrsz')],\n                       'ras_ras': 
[('r_lin/log', 'r_lin_log'),\n                                   ('g_lin/log', 'g_lin_log'),\n                                   ('b_lin/log', 'b_lin_log')],\n                       'ras_ratio_dB': [('[min_cc] [max_cc] [scale] [exp]', '[min_value] [max_value] [dB_offset]')],\n                       'rasSLC': [('[header]', '[hdrsz]')],\n                       'ratio': [('wgt_flg', 'wgt_flag')],\n                       'restore_float': [('input file', 'data_in'),\n                                         ('output file', 'data_out'),\n                                         ('interpolation_limit', 'interp_limit')],\n                       'S1_coreg_TOPS_no_refinement': [('RLK', 'rlks'),\n                                                       ('AZLK', 'azlks')],\n                       'S1_OPOD_vec': [('SLC_PAR', 'SLC_par')],\n                       'single_class_mapping': [('>...', '> <...>')],\n                       'ScanSAR_burst_cc_ad': [('bx', 'box_min'),\n                                               ('by', 'box_max')],\n                       'ScanSAR_burst_to_mosaic': [('DATA_tab_ref', 'data_tab_ref'),\n                                                   ('[mflg] [dtype]', '[mflg]')],\n                       'ScanSAR_full_aperture_SLC': [('SLCR_dir', 'SLC2_dir')],\n                       'scale_base': [('SLC-1_par-2', 'SLC1_par-2')],\n                       'sigma2gamma': [('<gamma>', '<gamma0>'),\n                                       ('gamma  (output)', 'gamma0  (output)'),\n                                       ('pwr1', 'sigma0')],\n                       'SLC_interp_lt': [('SLC-2', 'SLC2'),\n                                         ('blksz', 'blk_size')],\n                       'SLC_intf': [('SLC1s_par', 'SLC-1s_par'),\n                                    ('SLC2Rs_par', 'SLC-2Rs_par')],\n                       'SLC_intf_geo2': [('cc        (', 'CC        (')],\n                       'SLC_interp_map': [('coffs2_sm', 'coffs_sm')],\n                      
 'SLC_mosaic_S1_TOPS': [('wflg', 'bflg')],\n                       'srtm_mosaic': [('<lon>', '<lon2>')],\n                       'SSI_INT_S1': [('<SLC2> <par2>', '<SLC_tab2>')],\n                       'texture': [('weights_flag', 'wgt_flag')],\n                       'ts_rate': [('sim_flg', 'sim_flag')],\n                       'TX_SLC_preproc': [('TX_list', 'TSX_list')],\n                       'uchar2float': [('infile', 'data_in'),\n                                       ('outfile', 'data_out')],\n                       'validate': [('ras1', 'ras_map'),\n                                    ('rasf_map', 'ras_map'),\n                                    ('ras2', 'ras_inv'),\n                                    ('rasf_inventory', 'ras_inv'),\n                                    ('class1[1]', 'class1_1'),\n                                    ('class1[2]', 'class1_2'),\n                                    ('class1[n]', 'class1_n'),\n                                    ('class2[1]', 'class2_1'),\n                                    ('class2[2]', 'class2_2'),\n                                    ('class2[n]', 'class2_n')]}\n    if command_base in parnames_lookup.keys():\n        for replacement in parnames_lookup[command_base]:\n            out = out.replace(*replacement)\n    ###########################################\n    # filter header (general command description) and usage description string\n    header = '\\n'.join([x.strip('* ') for x in re.findall('[*]{3}.*(?:[*]{3}|)', out)])\n    header = '| ' + header.replace('\\n', '\\n| ')\n    usage = re.search('usage:.*(?=\\n)', out).group()\n    \n    # filter required and optional arguments from usage description text\n    arg_req_raw = [re.sub(r'[^\\w.-]*', '', x) for x in re.findall('[^<]*<([^>]*)>', usage)]\n    arg_opt_raw = [re.sub(r'[^\\w.-]*', '', x) for x in re.findall(r'[^[]*\\[([^]]*)]', usage)]\n    \n    ###########################################\n    # add parameters missing in the usage argument lists\n 
   \n    appends = {'mk_adf2_2d': ['cc_min', 'cc_max', 'mli_dir', 'scale', 'exponent', 'update', 'dem_par'],\n               'mk_pol2rec_2d': ['scale', 'exponent', 'min', 'max', 'rmax', 'mode', 'update'],\n               'SLC_interp_S1_TOPS': ['mode', 'order'],\n               'SLC_interp_map': ['mode', 'order']}\n    \n    if command_base in appends.keys():\n        for var in appends[command_base]:\n            if var not in arg_opt_raw:\n                arg_opt_raw.append(var)\n    ###########################################\n    # define parameter replacements; this is intended for parameters which are to be aggregated into a list parameter\n    replacements = {'cc_monitoring': [(['nfiles', 'f1', 'f2', '...'],\n                                       ['files'],\n                                       ['a list of input data files (float)'])],\n                    'dis_data': [(['nstack', 'pdata1', '...'],\n                                  ['pdata'],\n                                  ['a list of point data stack files'])],\n                    'lin_comb': [(['nfiles', 'f1', 'f2', '...'],\n                                  ['files'],\n                                  ['a list of input data files (float)']),\n                                 (['factor1', 'factor2', '...'],\n                                  ['factors'],\n                                  ['a list of factors to multiply the input files with'])],\n                    'lin_comb_cpx': [(['nfiles', 'f1', 'f2', '...'],\n                                      ['files'],\n                                      ['a list of input data files (float)']),\n                                     (['factor1_r', 'factor2_r', '...'],\n                                      ['factors_r'],\n                                      ['a list of real part factors to multiply the input files with']),\n                                     (['factor1_i', 'factor2_i'],\n                                      ['factors_i'],\n       
                               ['a list of imaginary part factors to multiply the input files with'])],\n                    'mask_class': [(['n_class', 'class_1', '...', 'class_n'],\n                                    ['class_values'],\n                                    ['a list of class map values'])],\n                    'mosaic': [(['nfiles', 'data_in1', 'DEM_par1', 'data_in2', 'DEM_par2', '...', '...'],\n                                ['data_in_list', 'DEM_par_list'],\n                                ['a list of input data files',\n                                 'a list of DEM/MAP parameter files for each data file'])],\n                    'multi_class_mapping': [(['nfiles', 'f1', 'f2', '...', 'fn'],\n                                             ['files'],\n                                             ['a list of input data files (float)'])],\n                    'rascc_mask_thinning': [(['thresh_1', '...', 'thresh_nmax'],\n                                             ['thresholds'],\n                                             ['a list of thresholds sorted from smallest to '\n                                              'largest scale sampling reduction'])],\n                    'single_class_mapping': [(['nfiles', 'f1', '...', 'fn'],\n                                              ['files'],\n                                              ['a list of point data stack files']),\n                                             (['lt1', 'ltn'],\n                                              ['thres_lower'],\n                                              ['a list of lower thresholds for the files']),\n                                             (['ut1', 'utn'],\n                                              ['thres_upper'],\n                                              ['a list of upper thresholds for the files'])],\n                    'validate': [(['nclass1', 'class1_1', 'class1_2', '...', 'class1_n'],\n                                  
['classes_map'],\n                                  ['a list of class values for the map data file (max. 16), 0 for all']),\n                                 (['nclass2', 'class2_1', 'class2_2', '...', 'class2_n'],\n                                  ['classes_inv'],\n                                  ['a list of class values for the inventory data file (max. 16), 0 for all'])]}\n    \n    if '..' in usage and command_base not in replacements.keys():\n        raise RuntimeError('the command contains multi-args which were not properly parsed')\n    \n    def replace(inlist, replacement):\n        outlist = list(inlist)\n        for old, new, description in replacement:\n            if old[0] not in outlist:\n                return outlist\n            outlist[outlist.index(old[0])] = new\n            for i in range(1, len(old)):\n                if old[i] in outlist:\n                    outlist.remove(old[i])\n        return dissolve(outlist)\n    \n    arg_req = list(arg_req_raw)\n    arg_opt = list(arg_opt_raw)\n    \n    if command_base in replacements.keys():\n        arg_req = replace(arg_req, replacements[command_base])\n        arg_opt = replace(arg_opt, replacements[command_base])\n    ###########################################\n    # check if there are any double parameters\n    \n    double = [k for k, v in Counter(arg_req + arg_opt).items() if v > 1]\n    if len(double) > 0:\n        raise RuntimeError('double parameter{0}: {1}'.format('s' if len(double) > 1 else '', ', '.join(double)))\n    ###########################################\n    # add a parameter inlist for commands which take interactive input via stdin\n    # the list of commands, which are interactive is hard to assess and thus likely a source of future errors\n    \n    inlist = ['create_dem_par', 'par_ESA_ERS']\n    \n    if command_base in inlist:\n        arg_req.append('inlist')\n    \n    ######################################################################################\n    # 
create the function argument string for the Python function\n    \n    # optional arguments are parametrized with '-' as default value, e.g., arg_opt='-'\n    argstr_function = ', '.join(arg_req + [x + \"='-'\" for x in arg_opt])\n    # a '-' in the parameter name is replaced with '_'\n    argstr_function = re.sub(r'([^\\'])-([^\\'])', r'\\1_\\2', argstr_function)\n    # replace unsupported 'def' parameter name with 'drm'\n    argstr_function = argstr_function.replace(', def=', ', drm=')\n    \n    # some commands have different defaults than '-'\n    replacements_defaults = {'S1_import_SLC_from_zipfiles': {'OPOD_dir': '.'}}\n    \n    if command_base in replacements_defaults.keys():\n        for key, value in replacements_defaults[command_base].items():\n            old = f\"{key}='-'\"\n            if isinstance(value, str):\n                new = f\"{key}='{value}'\"\n            else:\n                new = f\"{key}={value}\"\n            argstr_function = argstr_function.replace(old, new)\n    \n    # create the function definition string\n    fun_def = 'def {name}({args_fun}, logpath=None, outdir=None, shellscript=None):' \\\n        .format(name=command_base.replace('-', '_'),\n                args_fun=argstr_function)\n    \n    if command_base == '2PASS_INT':\n        fun_def = fun_def.replace(command_base, 'TWO_PASS_INT')\n    ######################################################################################\n    # special handling of flag args\n    flag_args = {'mk_adf2_2d': [('mli_dir', '-m', None),\n                                ('scale', '-s', None),\n                                ('exponent', '-e', None),\n                                ('update', '-u', False),\n                                ('dem_par', '-D', None)],\n                 'mk_pol2rec_2d': [('scale', '-s', None),\n                                   ('exp', '-e', None),\n                                   ('min', '-a', None),\n                                   ('max', '-b', 
None),\n                                   ('rmax', '-R', None),\n                                   ('mode', '-m', None),\n                                   ('update', '-u', False)],\n                 'mk_tab2': [('linenumber', '--linenumber', False)]}\n    \n    # replace arg default like arg='-' with arg=None or arg=False\n    if command_base in flag_args:\n        for arg in flag_args[command_base]:\n            fun_def = re.sub(f'{arg[0]}=\\'-\\'', f'{arg[0]}={arg[2]}', fun_def)\n    ######################################################################################\n    # create the process call argument string\n    \n    # a '-' in the parameter name is replaced with '_'\n    # e.g. 'arg1, arg2, arg3'\n    # if a parameter is named 'def' (not allowed in Python) it is renamed to 'drm'\n    \n    # inlist is not a proc arg but a parameter passed to function process\n    proc_args = arg_req + arg_opt\n    if command_base in inlist:\n        proc_args.remove('inlist')\n    proc_args_tmp = list(proc_args)\n    # insert the length of a list argument as a proc arg\n    if command_base in replacements.keys() and command_base != 'rascc_mask_thinning':\n        key = replacements[command_base][0][1]\n        if isinstance(key, list):\n            key = key[0]\n        proc_args_tmp.insert(proc_args_tmp.index(key), f'len({key})')\n    \n    if command_base == 'validate':\n        index = proc_args_tmp.index('classes_inv')\n        proc_args_tmp.insert(index, 'len(classes_inv)')\n    \n    argstr_process = ', '.join(proc_args_tmp) \\\n        .replace('-', '_') \\\n        .replace(', def,', ', drm,')\n    \n    # create the process argument list string\n    cmd_str = \"cmd = ['{command}', {args_cmd}]\".format(command=command, args_cmd=argstr_process)\n    \n    # special handling of optional flag args\n    # the args are removed from the cmd list and flags (plus values) added if not None or True\n    # e.g. 
'-u' if update=True or '-m /path' if mli_dir='/path'\n    if command_base in flag_args:\n        args = []\n        for arg in flag_args[command_base]:\n            cmd_str = cmd_str.replace(', {}'.format(arg[0]), '')\n            args.append(arg[0])\n            cmd_str += \"\\nif {a} is not {d}:\\n{i}cmd.append('{k}')\" \\\n                .format(i=indent, d=arg[2], k=arg[1], a=arg[0])\n            if arg[2] is None:\n                cmd_str += '\\n{i}cmd.append({a})'.format(i=indent, a=arg[0])\n    \n    # create the process call string\n    proc_str = \"process(cmd, logpath=logpath, outdir=outdir{inlist}, shellscript=shellscript)\" \\\n        .format(inlist=', inlist=inlist' if command_base in inlist else '')\n    fun_proc = '{0}\\n{1}'.format(cmd_str, proc_str)\n    \n    if command_base == 'lin_comb_cpx':\n        fun_proc = fun_proc.replace('factors_r, factors_i', 'zip(factors_r, factors_i)')\n    elif command_base == 'mosaic':\n        fun_proc = fun_proc.replace('data_in_list, DEM_par_list', 'zip(data_in_list, DEM_par_list)')\n    elif command_base == 'single_class_mapping':\n        fun_proc = fun_proc.replace('files, thres_lower, thres_upper', 'zip(files, thres_lower, thres_upper)')\n    \n    ######################################################################################\n    # create the function docstring\n    \n    # find the start of the docstring and filter the result\n    doc_start = 'input parameters:[ ]*\\n' if re.search('input parameters', out) else 'usage:.*(?=\\n)'\n    doc = '\\n' + out[re.search(doc_start, out).end():]\n    \n    # define a pattern containing individual parameter documentations\n    pattern = r'\\n[ ]*[<\\[]*(?P<par>{0})[>\\]]*[\\t ]+(?P<doc>.*)'.format(\n        '|'.join(arg_req_raw + arg_opt_raw).replace('.', r'\\.'))\n    \n    # identify the start indices of all pattern matches\n    starts = [m.start(0) for m in re.finditer(pattern, doc)] + [len(out)]\n    \n    # filter out all individual (parameter, 
description) docstring tuples\n    doc_items = []\n    j = 0\n    done = []\n    for i in range(0, len(starts) - 1):\n        doc_raw = doc[starts[i]:starts[i + 1]]\n        doc_list = list(re.search(pattern, doc_raw, flags=re.DOTALL).groups())\n        \n        if doc_list[0] not in proc_args:\n            if command_base in replacements.keys():\n                repl = replacements[command_base][0]\n                for k, item in enumerate(repl[1]):\n                    if item not in done:\n                        doc_items.append([item, repl[2][k]])\n                        done.append(item)\n                        j += 1\n            continue\n        \n        if doc_list[0] in done:\n            doc_items[-1][1] += doc_raw\n            continue\n        \n        while doc_list[0] != proc_args[j]:\n            doc_list_sub = [proc_args[j], 'not documented']\n            doc_items.append(doc_list_sub)\n            j += 1\n        \n        doc_items.append(doc_list)\n        done.append(doc_items[-1][0])\n        j += 1\n    \n    for k in range(j, len(proc_args)):\n        doc_items.append([proc_args[k], 'not documented'])\n    \n    # add a parameter inlist to the docstring tuples\n    if command_base in inlist:\n        pos = [x[0] for x in doc_items].index(arg_opt[0])\n        doc_items.insert(pos, ('inlist', 'a list of arguments to be passed to stdin'))\n    \n    # remove the replaced parameters from the argument lists\n    doc_items = [x for x in doc_items if x[0] in arg_req + arg_opt]\n    \n    # replace parameter names which are not possible in Python syntax, i.e. 
containing '-' or named 'def'\n    for i, item in enumerate(doc_items):\n        par = item[0].replace('-', '_').replace(', def,', ', drm,')\n        description = item[1]\n        doc_items[i] = (par, description)\n    \n    if command_base in ['par_CS_geo', 'par_KS_geo']:\n        doc_items.append(('MLI_par', '(output) ISP SLC/MLI parameter file (example: yyyymmdd.mli.par)'))\n        doc_items.append(('DEM_par', '(output) DIFF/GEO DEM parameter file (example: yyyymmdd.dem_par)'))\n        doc_items.append(('GEO', '(output) Geocoded image data file (example: yyyymmdd.geo)'))\n    \n    # check if all parameters are documented:\n    proc_args = [x.replace('-', '_').replace(', def,', ', drm,') for x in arg_req + arg_opt]\n    mismatch = [x for x in proc_args if x not in [y[0] for y in doc_items]]\n    if len(mismatch) > 0:\n        raise RuntimeError('parameters missing in docsring: {}'.format(', '.join(mismatch)))\n    ###########################################\n    # format the docstring parameter descriptions\n    \n    docstring_elements = ['Parameters\\n----------']\n    \n    # do some extra formatting\n    for i, item in enumerate(doc_items):\n        par, description = item\n        description = re.split(r'\\n+\\s*', description.strip('\\n'))\n        \n        # escape * characters (which are treated as special characters for bullet lists by sphinx)\n        description = [x.replace('*', r'\\\\*') for x in description]\n        \n        # convert all lines starting with an integer number or 'NOTE' to bullet list items\n        latest = None\n        for i in range(len(description)):\n            item = description[i]\n            if re.search('^(?:(?:-|)[-0-9]+|NOTE):', item):\n                latest = i\n                # prepend '* ' and replace missing spaces after a colon: 'x:x' -> 'x: x'\n                description[i] = '* ' + re.sub(r'((?:-|)[-0-9]+:)(\\w+)', r'\\1 \\2', item)\n        \n        # format documentation lines coming after the last 
bullet list item\n        # sphinx expects lines after the last bullet item to be indented by two spaces if\n        # they belong to the bullet item or otherwise a blank line to mark the end of the bullet list\n        if latest:\n            # case if there are still lines coming after the last bullet item,\n            # prepend an extra two spaces to these lines so that they are properly\n            # aligned with the text of the bullet item\n            if latest + 2 <= len(description):\n                i = 1\n                while latest + i + 1 <= len(description):\n                    description[latest + i] = '  ' + description[latest + i]\n                    i += 1\n            # if not, then insert an extra blank line\n            else:\n                description[-1] = description[-1] + '\\n'\n        \n        # parse the final documentation string for the current parameter\n        description = '\\n{0}{0}'.join(description).format(indent)\n        doc = '{0}:\\n{1}{2}'.format(par, indent, description)\n        docstring_elements.append(doc)\n    ###########################################\n    # add docsrings of general parameters and combine the result\n    \n    # create docstring for parameter logpath\n    doc = 'logpath: str or None\\n{0}a directory to write command logfiles to'.format(indent)\n    docstring_elements.append(doc)\n    \n    # create docstring for parameter outdir\n    doc = 'outdir: str or None\\n{0}the directory to execute the command in'.format(indent)\n    docstring_elements.append(doc)\n    \n    # create docstring for parameter shellscript\n    doc = 'shellscript: str or None\\n{0}a file to write the Gamma commands to in shell format'.format(indent)\n    docstring_elements.append(doc)\n    \n    # combine the complete docstring\n    fun_doc = '\\n{header}\\n\\n{doc}\\n' \\\n        .format(header=header,\n                doc='\\n'.join(docstring_elements))\n    
######################################################################################\n    \n    # combine the elements to a complete Python function string\n    fun = '''{defn}\\n\"\"\"{doc}\"\"\"\\n{proc}'''.format(defn=fun_def, doc=fun_doc, proc=fun_proc)\n    \n    # indent all lines and add an extra empty line at the end\n    fun = fun.replace('\\n', '\\n{}'.format(indent)) + '\\n'\n    \n    return fun\n\n\ndef parse_module(bindir, outfile):\n    \"\"\"\n    parse all Gamma commands of a module to functions and save them to a Python script.\n\n    Parameters\n    ----------\n    bindir: str\n        the `bin` directory of a module containing the commands\n    outfile: str\n        the name of the Python file to write\n\n    Returns\n    -------\n    \n    Examples\n    --------\n    >>> import os\n    >>> from pyroSAR.gamma.parser import parse_module\n    >>> outname = os.path.join(os.environ['HOME'], 'isp.py')\n    >>> parse_module('/cluster/GAMMA_SOFTWARE-20161207/ISP/bin', outname)\n    \"\"\"\n    \n    if not os.path.isdir(bindir):\n        raise OSError('directory does not exist: {}'.format(bindir))\n    \n    excludes = ['coord_trans',  # doesn't take any parameters and is interactive\n                'RSAT2_SLC_preproc',  # takes option flags\n                'mk_ASF_CEOS_list',  # \"cannot create: Directory nonexistent\"\n                '2PASS_UNW',  # parameter name inconsistencies\n                'mk_diff_2d',  # takes option flags\n                'gamma_doc'  # opens the Gamma documentation\n                ]\n    failed = []\n    outstring = ''\n    for cmd in sorted(finder(bindir, [r'^\\w+$'], regex=True), key=lambda s: s.lower()):\n        basename = os.path.basename(cmd)\n        if basename not in excludes:\n            try:\n                fun = parse_command(cmd)\n            except RuntimeError as e:\n                failed.append('{0}: {1}'.format(basename, str(e)))\n                continue\n            except DeprecationWarning:\n  
              continue\n            except:\n                failed.append('{0}: {1}'.format(basename, 'error yet to be assessed'))\n                continue\n            outstring += fun + '\\n\\n'\n    if len(outstring) > 0:\n        if not os.path.isfile(outfile):\n            with open(outfile, 'w') as out:\n                out.write('from pyroSAR.gamma.auxil import process\\n\\n\\n')\n        with open(outfile, 'a') as out:\n            out.write(outstring)\n    if len(failed) > 0:\n        info = 'the following functions could not be parsed:\\n{0}\\n({1} total)'\n        log.info(info.format('\\n'.join(failed), len(failed)))\n\n\ndef autoparse():\n    \"\"\"\n    automatic parsing of GAMMA commands.\n    This function will detect the GAMMA installation via environment variable `GAMMA_HOME`, detect all available\n    modules (e.g. ISP, DIFF) and parse all the module's commands via function :func:`parse_module`.\n    A new Python module will be created called `gammaparse`, which is stored under `$HOME/.pyrosar`.\n    Upon importing the `pyroSAR.gamma` submodule, this function is run automatically and module `gammaparse`\n    is imported as `api`.\n    \n    Returns\n    -------\n\n    Examples\n    --------\n    >>> from pyroSAR.gamma.api import diff\n    >>> print('create_dem_par' in dir(diff))\n    True\n    \"\"\"\n    home = ExamineGamma().home\n    target = os.path.join(os.path.expanduser('~'), '.pyrosar', 'gammaparse')\n    if not os.path.isdir(target):\n        os.makedirs(target)\n    for module in finder(home, ['[A-Z]*'], foldermode=2):\n        outfile = os.path.join(target, os.path.basename(module).lower() + '.py')\n        if not os.path.isfile(outfile):\n            log.info('parsing module {} to {}'.format(os.path.basename(module), outfile))\n            for submodule in ['bin', 'scripts']:\n                log.info(submodule)\n                try:\n                    parse_module(os.path.join(module, submodule), outfile)\n                except 
OSError:\n                    log.info('..does not exist')\n    modules = [re.sub(r'\\.py', '', os.path.basename(x)) for x in finder(target, [r'[a-z]+\\.py$'], regex=True)]\n    if len(modules) > 0:\n        with open(os.path.join(target, '__init__.py'), 'w') as init:\n            init.write('from . import {}'.format(', '.join(modules)))\n"
  },
  {
    "path": "pyroSAR/gamma/parser_demo.py",
    "content": "from pyroSAR.gamma.auxil import process\n\n\ndef adapt_filt(int, sm, width, low_SNR_thr='-', filt_width='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Adaptive bandpass filtering of interferograms\n    | Copyright 2023, Gamma Remote Sensing, v3.6 clw 18-Apr-2023\n    \n    Parameters\n    ----------\n    int:\n        (input) complex interferogram image filename\n    sm:\n        (output) smoothed interferogram filename\n    width:\n        number of samples/row\n    low_SNR_thr:\n        low SNR threshold (enter - for default: .25);\n    filt_width:\n        filter width in pixels (enter - for default: 1.0)\n    xmin:\n        offset to starting range pixel(enter - for default: 0)\n    xmax:\n        offset last range pixel (enter - for default: width-1)\n    ymin:\n        offset to starting azimuth row (enter - for default: 0)\n    ymax:\n        offset to last azimuth row (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/adapt_filt', int, sm, width, low_SNR_thr, filt_width, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef adf(interf, sm, cc, width, alpha='-', nfft='-', cc_win='-', step='-', loff='-', nlines='-', wfrac='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Adaptive interferogram bandpass filter based on the power spectral density\n    | Copyright 2024, Gamma Remote Sensing, v3.9 12-Mar-2024 clw/cm\n    \n    Parameters\n    ----------\n    interf:\n        (input) interferogram (fcomplex)\n    sm:\n        (output) filtered interferogram (fcomplex)\n    cc:\n        (output) filtered interferogram correlation 
coefficient (float)\n    width:\n        number of samples/line\n    alpha:\n        exponent for non-linear filtering (enter - for default: 0.40)\n    nfft:\n        filtering FFT window size, 2\\\\*\\\\*N, 8 --> 512, (enter - for default: 32)\n    cc_win:\n        correlation parameter estimation window size odd, max: 15 (enter - for default: 5)\n    step:\n        processing step (enter - for default: nfft/8)\n    loff:\n        offset to starting line to process (enter - for default: 0)\n    nlines:\n        number of lines to process (enter - for default: to end of file)\n    wfrac:\n        minimum fraction of points required to be non-zero in the filter window (enter - for default: 0.500)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/adf', interf, sm, cc, width, alpha, nfft, cc_win, step, loff, nlines, wfrac]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef adf2(interf, cc_interf, sm, cc_filt, width, alpha_max='-', nfft='-', cc_win='-', step='-', loff='-', nlines='-', wfrac='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Adaptive interferogram filter based on the power spectral density and correlation coefficient\n    | Copyright 2023, Gamma Remote Sensing, v1.2 18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    interf:\n        (input) complex interferogram (fcomplex)\n    cc_interf:\n        (input) correlation coefficient of the input interferogram (float)\n    sm:\n        (output) filtered interferogram (fcomplex)\n    cc_filt:\n        (output) filtered interferogram correlation coefficient (float)\n    width:\n        number of samples/line\n    alpha_max:\n        maximum value for the adaptive filter exponent (enter - for default: 
0.50)\n    nfft:\n        filter window FFT size, 2\\\\*\\\\*N, 8->512, (enter - for default: 32)\n    cc_win:\n        filtered interferogram correlation estimation window size odd, max: 21 (enter - for default: 9)\n    step:\n        processing step in range and azimuth (enter - for default: nfft/8)\n    loff:\n        offset to starting line to process (enter - for default: 0)\n    nlines:\n        number of lines to process (enter - for default: to end of file)\n    wfrac:\n        minimum fraction of points required to be non-zero in the filter window (enter - for default: 0.200)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/adf2', interf, cc_interf, sm, cc_filt, width, alpha_max, nfft, cc_win, step, loff, nlines, wfrac]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef af_SLC(SLC_par, SLC, rwin='-', azwin='-', dr='-', daz='-', thres='-', a1_flg='-', b0_flg='-', offsets='-', n_ovr='-', roff='-', azoff='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Focus testing for SLC data using autofocus estimation of effective velocity\n    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) ISP SLC image parameter file\n    SLC:\n        (input) single-look complex image\n    rwin:\n        range window size (enter - for default: 1024)\n    azwin:\n        azimuth window size (enter - for default: 4096)\n    dr:\n        range sample increment (enter - for default: 1024,  enter 0 for single patch)\n    daz:\n        azimuth line increment (enter - for default: 8192,  enter 0 for single patch)\n    thres:\n        offset estimation SNR threshold (enter - for default: 10.000)\n    a1_flg:\n 
       fit a1 for first derivative of the effective velocity w.r.t.range (enter - for default)\n            * 0: no (default)\n            * 1: yes\n    \n    b0_flg:\n        fit b0 for first derivative of the effective velocity w.r.t. along-track time (enter - for default)\n            * 0: no (default)\n            * 1: yes\n    \n    offsets:\n        (output) range and azimuth offsets and SNR data in text format (enter - for no output)\n    n_ovr:\n        SLC oversampling factor (1,2,4: enter - for default: 1)\n    roff:\n        range offset for single patch center (enter - for default: image center in range)\n    azoff:\n        azimuth offset for single patch center (enter - for default: image center in azimuth)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/af_SLC', SLC_par, SLC, rwin, azwin, dr, daz, thres, a1_flg, b0_flg, offsets, n_ovr, roff, azoff]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ASAR_LO_phase_drift(SLC1_par, SLC2_par, OFF_par, ph_drift, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate interferometric phase correction due to drift of the ASAR local oscillator\n    | Copyright 2023, Gamma Remote Sensing, v1.2 18-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file \n    ph_drift:\n        (output) interferometric phase correction due to drift of the ASAR LO (radians)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or 
None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ASAR_LO_phase_drift', SLC1_par, SLC2_par, OFF_par, ph_drift]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ASAR_XCA(ASA_XCA, antenna, swath='-', pol='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Interpretation of ASAR external calibration data file (ASA_XCA)\n    | Copyright 2006, Gamma Remote Sensing, v1.1 7-June-2006 awi/uw/clw\n    \n    Parameters\n    ----------\n    ASA_XCA:\n        (input) ASAR external calibration data file (binary)\n    antenna:\n        (output) 1-way antenna gain pattern file or '-' (if not provided)\n            or 'all' to generate all ASAR antenna diagrams\n    swath:\n        ASAR swath (IS1,IS2,...IS7;SS1,SS2,...SS5)\n    pol:\n        polarization (HH,VV,HV,VH)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ASAR_XCA', ASA_XCA, antenna, swath, pol]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ave_cpx(cpx_list, width, cpx_ave, start='-', nlines='-', zflag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate average of a set of FCOMPLEX images\n    | Copyright 2022, Gamma Remote Sensing, v2.1 17-Aug-2022 clw/cm\n    \n    Parameters\n    ----------\n    cpx_list:\n        (input) list of coregistered images (FCOMPLEX)\n    width:\n        number of samples/line\n    cpx_ave:\n        (output) average of images listed in cpx_list (FCOMPLEX)\n    start:\n        starting line (enter - for default: 1)\n    nlines:\n        number of lines to process (enter - for default: entire file)\n    zflag:\n        zero flag (enter - for default)\n    
        * 0: interpret 0.0 as missing data value (default)\n            * 1: interpret 0.0 as valid data\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ave_cpx', cpx_list, width, cpx_ave, start, nlines, zflag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ave_image(im_list, width, ave_image, start='-', nlines='-', pixav_x='-', pixav_y='-', zflag='-', nmin='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate average of a set of FLOAT images\n    | Copyright 2022, Gamma Remote Sensing, v2.6 17-Aug-2022 clw/cm\n    \n    Parameters\n    ----------\n    im_list:\n        (input) list of coregistered images (FLOAT)\n    width:\n        number of samples/line\n    ave_image:\n        (output) average of images listed in im_list (FLOAT)\n    start:\n        starting line (enter - for default: 1)\n    nlines:\n        number of lines to process (enter - for default: entire file)\n    pixav_x:\n        number of pixels to average in width  (enter - for default: 1)\n    pixav_y:\n        number of pixels to average in height (enter - for default: 1)\n    zflag:\n        zero flag (enter - for default)\n            * 0: interpret 0.0 as missing data value (default)\n            * 1: interpret 0.0 as valid data\n    \n    nmin:\n        minimum number of images required to calculate the average if zflag = 0 (enter - for default: 3/4\\\\*nfiles)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ave_image', im_list, 
width, ave_image, start, nlines, pixav_x, pixav_y, zflag, nmin]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef az_integrate(data, width, azi, cflg, scale='-', lz='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate azimuth integral of float data (unwrapped phase or azimuth offsets)\n    | Copyright 2012, Gamma Remote Sensing, v1.2 6-Feb-2012\n    \n    Parameters\n    ----------\n    data:\n        (input) input data (example: SBI dtrapped phase) (float)\n    width:\n        (input) number of range samples/line \n    azi:\n        (output) input data integrated along azimuth (float)\n    cflg:\n        integration constant flag:\n            * 0: set azimuth integral value to 0.0 at specified line\n            * 1: set average of the azimuth integral to 0.0\n    \n    scale:\n        scale factor to apply to the data (enter - for default, default: 1.0)\n    lz:\n        line offset where the azimuth integral is set to 0.0 (cflg = 0, enter - for default, default: 0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/az_integrate', data, width, azi, cflg, scale, lz]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef az_spec_SLC(SLC, SLC_par, spectrum, roff='-', namb='-', pltflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Doppler centroid estimate from SLC images\n    | Copyright 2023, Gamma Remote Sensing, v3.0 19-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SAR image data file (FCOMPLEX or SCOMPLEX format)\n    SLC_par:\n        (input) ISP SLC image parameter file\n    spectrum:\n        (output) Doppler spectrum (text format)\n    roff:\n        range sample offset 
to center of estimation window (enter - for default: center of swath)\n    namb:\n        number of multiples of the PRF to add to the estimated centroid (enter - for default: 0)\n    pltflg:\n        azimuth spectrum plotting flag (enter - for default)\n            * 0: none (default)\n            * 1: output plot in PNG format\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/az_spec_SLC', SLC, SLC_par, spectrum, roff, namb, pltflg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef base_copy(SLC1_par, baseline1, SLC2_par, baseline2, time_rev='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate baseline file for a subsection of a reference SLC\n    | Copyright 2023, Gamma Remote Sensing, v1.2 24-Apr-2023 ts/clw/uw\n    \n    Parameters\n    ----------\n    SLC1_par:\n        (input) ISP image parameter file of the reference SLC\n    baseline1:\n        (input) baseline file derived using the reference SLC geometry\n    SLC2_par:\n        (input) ISP image parameter file corresponding to the subsection of the reference SLC\n    baseline2:\n        (output) baseline file derived using the geometry and timing of the SLC subsection\n    time_rev:\n        SLC image time reversal flag (enter - for default)\n            * 1: normal (default)\n            * -1: time-reversed\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/base_copy', SLC1_par, baseline1, SLC2_par, baseline2, time_rev]\n    process(cmd, 
logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef base_est_fft(interf, SLC1_par, OFF_par, baseline, nazfft='-', r_samp='-', az_line='-', nrfft='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Estimate baseline from interferogram phase spectrum\n    | Copyright 2023, Gamma Remote Sensing, v2.3 clw/uw 18-Apr-2023\n    \n    Parameters\n    ----------\n    interf:\n        (input) multilook interferogram with residual range and azimuth fringes\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    baseline:\n        (output) baseline file\n    nazfft:\n        size of azimuth FFT (2\\\\*\\\\*N) (enter - for default: 512)\n    r_samp:\n        range pixel offset to center of the FFT window (enter - for default: center)\n    az_line:\n        line offset from start of the interf. for the  FFT window (enter - for default: center)\n    nrfft:\n        size of the range FFT (2\\\\*\\\\*N), minimum: 32 (enter - for default: 512)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/base_est_fft', interf, SLC1_par, OFF_par, baseline, nazfft, r_samp, az_line, nrfft]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef base_init(SLC1_par, SLC2_par, OFF_par, interf, baseline, mflag='-', nrfft='-', nazfft='-', r_samp='-', az_line='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Estimate initial baseline using orbit state vectors, offsets, and interferogram phase\n    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 
ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file (enter - for none)\n    interf:\n        (input) unflattened interferogram (enter - for none)\n            base      (output) baseline parameter file\n    baseline:\n        not documented\n    mflag:\n        baseline estimation method flag (enter - for default)\n            mflag    b_para    b_perp    input\n            * 0:     orbits    orbits    p1,p2  (default)\n            * 1:     offsets   offsets   p1,p2,off\n            * 2:     orbits    fft       p1,p2,off,int\n            * 3:     offsets   fft       p1,p2,off,int\n            * 4:     fft       fft       p1,off,int   \n    \n    nrfft:\n        size of range FFT   (512, 1024,...) (enter - for default determined from image width)\n    nazfft:\n        size of azimuth FFT (512, 1024,...) (enter - for default determined from image azimuth lines)\n    r_samp:\n        range pixel offset to center of the FFT window (enter - for default, default: range center)\n    az_line:\n        line offset from start of the interf. 
for the  FFT window (enter - for default, default: azimuth center)\n            * NOTE: Not all input data files are required for the different methods\n              enter - for files that are not provided\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/base_init', SLC1_par, SLC2_par, OFF_par, interf, baseline, mflag, nrfft, nazfft, r_samp, az_line]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef base_ls(SLC_par, OFF_par, gcp_ph, baseline, ph_flag='-', bc_flag='-', bn_flag='-', bcdot_flag='-', bndot_flag='-', bperp_min='-', SLC2R_par='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Least squares baseline estimation using terrain heights\n    | Copyright 2023, Gamma Remote Sensing, v2.4 18-Apr-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) ISP parameter file of the reference SLC\n    OFF_par:\n        (input) ISP interferogram/offset parameter file\n    gcp_ph:\n        (input) ground control point heights + extracted unwrapped phase (text format)\n    baseline:\n        (input) baseline parameter file\n    ph_flag:\n        restore range phase ramp (enter - for default)\n            * 0: do not restore (default)\n            * 1: restore\n    \n    bc_flag:\n        cross-track baseline component estimate (enter - for default)\n            * 0: orbit-derived\n            * 1: estimate from data (default)\n    \n    bn_flag:\n        normal baseline component estimate (enter - for default)\n            * 0: orbit-derived\n            * 1: estimate from data (default)\n    \n    bcdot_flag:\n        cross-track baseline rate estimate (enter - for default)\n            * 0: orbit-derived\n            * 1: estimate from data 
(default)\n    \n    bndot_flag:\n        normal baseline rate estimate (enter - for default)\n            * 0: orbit-derived (default)\n            * 1: estimate from data\n    \n    bperp_min:\n        minimum perpendicular baseline required for L.S estimation (m, enter - for default:  10.0)\n    SLC2R_par:\n        (input) parameter file of resampled SLC, required if SLC2 frequency differs from SLC1 (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/base_ls', SLC_par, OFF_par, gcp_ph, baseline, ph_flag, bc_flag, bn_flag, bcdot_flag, bndot_flag, bperp_min, SLC2R_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef base_orbit(SLC1_par, SLC2_par, baseline, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Estimate baseline from orbit state vectors\n    | Copyright 2023, Gamma Remote Sensing, v4.5 clw/cm 18-Apr-2023\n    \n    Parameters\n    ----------\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    baseline:\n        (output) baseline file (text format, enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/base_orbit', SLC1_par, SLC2_par, baseline]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef base_perp(baseline, SLC1_par, OFF_par, time_rev='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate baseline components perpendicular and parallel to look 
vector\n    | Copyright 2023, Gamma Remote Sensing, v3.6 18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    baseline:\n        (input) baseline file (text)\n    SLC1_par:\n        (input) ISP parameter file of SLC1 (reference SLC)\n    OFF_par:\n        (input) ISP interferogram/offset parameter file\n    time_rev:\n        SLC image time reversal flag (enter - fo default)\n            * 1: normal (default)\n            * -1: time-reversed\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/base_perp', baseline, SLC1_par, OFF_par, time_rev]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef bpf(data_in, data_out, width, fc_x, bw_x, fc_y, bw_y, roff='-', azoff='-', nr='-', naz='-', dtype='-', zflag='-', beta='-', fir_len='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Interferometric SAR Processor (ISP): Program GAMMA_SOFTWARE-20250625/ISP/bin/bpf.c\n    | Copyright 2023, Gamma Remote Sensing, v1.9 18-Apr-2023 clw\n    | Bandpass filter for 2-dimensional image data (FCOMPLEX, SCOMPLEX, and FLOAT)\n    \n    Parameters\n    ----------\n    data_in:\n        (input) input image data  file\n    data_out:\n        (output) bandpass filtered image data\n    width:\n        number of samples/line\n    fc_x:\n        normalized x-coord. (across) filter center frequency (range: -0.5 --> 0.5)\n    bw_x:\n        normalized x-coord. bandwidth (range: 0 --> 1.0)\n    fc_y:\n        normalized y-coord. (down) filter center frequency (range: -0.5 --> 0.5)\n    bw_y:\n        normalized y-coord. 
bandwidth (range: 0 --> 1.0)\n    roff:\n        offset to starting range to filter (enter - for default: 0)\n    azoff:\n        offset to starting azimuth to filter (enter - for default: 0)\n    nr:\n        number of range pixels to filter  (enter - for default: width - roff)\n    naz:\n        number of azimuth lines to filter (enter - for default: nlines - azoff)\n    dtype:\n        data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n            * 2: FLOAT\n    \n    zflag:\n        zero data flag (enter - for default)\n            * 0: set output to 0.0 when the input data are 0.0 (no_data)(default)\n            * 1: 0.0 values are considered as valid data\n    \n    beta:\n        Kaiser window beta parameter (enter - for default:    4.538)\n    fir_len:\n        finite impulse response filter length (enter - for default: 64)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/bpf', data_in, data_out, width, fc_x, bw_x, fc_y, bw_y, roff, azoff, nr, naz, dtype, zflag, beta, fir_len]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef bridge_unw(int, flag, unw, bridge, width, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Phase unwrap new regions with bridges to regions already unwrapped\n    | Copyright 2023, Gamma Remote Sensing, v1.5 19-Apr-2023 clw\n    \n    Parameters\n    ----------\n    int:\n        (input) interferogram (FCOMPLEX)\n    flag:\n        (input) unwrapping flag file\n    unw:\n        (input/output) unwrapped phase (FLOAT) \n    bridge:\n        (input) bridge data file (text format)\n    width:\n        number of samples/row\n    xmin:\n        starting range 
pixel offset to unwrap (enter - for default: 0)\n    xmax:\n        last range pixel offset to unwrap (enter - for default: width-1)\n    ymin:\n        starting azimuth row offset to unwrap, relative to start (enter - for default: 0)\n    ymax:\n        last azimuth row offset to unwrap, relative to start (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/bridge_unw', int, flag, unw, bridge, width, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef cc_wave(interf, MLI1, MLI2, cc, width, bx='-', by='-', wflg='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Estimate interferometric correlation coefficient\n    | Copyright 2023, Gamma Remote Sensing, v6.4 6-Dec-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    interf:\n        (input) normalized complex interferogram (FCOMPLEX)\n    MLI1:\n        (input) multilook intensity image of the first scene (FLOAT) (enter - for none)\n    MLI2:\n        (input) multilook intensity image of the second scene (FLOAT) (enter - for none)\n    cc:\n        (output) estimated correlation coefficient (FLOAT)\n    width:\n        number of samples/line\n    bx:\n        estimation window size in columns (enter - for default: 5.0)\n    by:\n        estimation window size in lines (enter - for default: 5.0)\n    wflg:\n        estimation window (enter - for default):\n            * 0: rectangular (constant weighting) (default)\n            * 1: circular triangular\n            * 2: circular Gaussian\n            * 3: normalized vector sum with rectangular window (constant weighting)\n            * NOTE: This estimator does not use the 
MLI data\n    \n    xmin:\n        starting range pixel offset (enter - for default: 0)\n    xmax:\n        last range pixel offset (enter - for default: width - 1)\n    ymin:\n        starting azimuth row offset, relative to start (enter -  for default: 0)\n    ymax:\n        last azimuth row offset, relative to start (enter - for default: nlines - 1)\n            * NOTE:   The normalized vector sum (wflg = 3) is used as estimator when the MLI images are not provided.\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/cc_wave', interf, MLI1, MLI2, cc, width, bx, by, wflg, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef clear_flag(flag, width, flag_bits, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Clear phase unwrapping flag bits\n    | Copyright 2023, Gamma Remote Sensing, v1.7 19-Apr-2023 clw\n    \n    Parameters\n    ----------\n    flag:\n        (input)phase unwrapping flag filename \n    width:\n        number of samples/row\n    flag_bits:\n        byte with value of flag(s) to be cleared: \n            Charges = 3\tGuides = 4\tLow SNR = 8\tVisited = 16\n            BRANCH PT. 
= 32\tCuts   = 64\tLawn    = 128\n    xmin:\n        starting range pixel offset (enter - for default: 0)\n    xmax:\n        last range pixel offset (enter - for default: width-1)\n    ymin:\n        starting azimuth row offset, relative to start (enter - for default: 0)\n    ymax:\n        last azimuth row offset, relative to start (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/clear_flag', flag, width, flag_bits, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef corr_flag(corr, flag, width, corr_thr, xmin='-', xmax='-', ymin='-', ymax='-', border='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Low correlation region detection for phase unwrapping\n    | Copyright 2023, Gamma Remote Sensing, v2.6 19-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    corr:\n        (input)interferometric correlation file\n    flag:\n        (input/output) phase unwrapping flag filename \n    width:\n        number of samples/row\n    corr_thr:\n        correlation threshold (0 --> 1.0)\n    xmin:\n        starting range pixel offset (enter - for default: 0)\n    xmax:\n        last range pixel offset (enter - for default: width-1)\n    ymin:\n        starting azimuth row offset, relative to start (enter - for default: 0)\n    ymax:\n        last azimuth row offset, relative to start (enter - for default: nlines-1)\n    border:\n        effective range of low coherence pixels to set low coherence flag (enter - for default: 2)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a 
file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/corr_flag', corr, flag, width, corr_thr, xmin, xmax, ymin, ymax, border]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef create_offset(SLC1_par, SLC2_par, OFF_par, algorithm='-', rlks='-', azlks='-', iflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Create and update ISP offset and interferogram parameter files\n    | Copyright 2023 Gamma Remote Sensing v5.6 18-Apr-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC1_par:\n        (input) SLC1/MLI1 ISP image parameter filename (reference)\n    SLC2_par:\n        (input) SLC2/MLI2 ISP image parameter filename\n    OFF_par:\n        (input/output) ISP offset/interferogram parameter file\n    algorithm:\n        offset estimation algorithm\n            * 1: intensity cross-correlation (default)\n            * 2: fringe visibility\n    \n    rlks:\n        number of interferogram range looks (enter -  for default: 1)\n    azlks:\n        number of interferogram azimuth looks (enter - for default: 1)\n    iflg:\n        interactive mode flag (enter -  for default)\n            * 0: non-interactive\n            * 1: interactive (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/create_offset', SLC1_par, SLC2_par, OFF_par, algorithm, rlks, azlks, iflg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef dcomp_sirc(infile, outfile, samples, loff='-', nlines='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract SIR-C SLC compressed single-pol data\n    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 clw\n    \n    
Parameters\n    ----------\n    infile:\n        (input) SIR-C single-pol SLC compressed data\n    outfile:\n        (output) complex floating point data\n    samples:\n        number of polarimetric samples per input line (4 bytes/sample)\n    loff:\n        offset to starting line (enter - for default: 0)\n    nlines:\n        number of lines to copy (enter - or 0 for default: entire file)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/dcomp_sirc', infile, outfile, samples, loff, nlines]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef dcomp_sirc_quad(infile, outfile, samples, parameter, loff='-', nlines='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract SIR-C MLC or SLC compressed quad-pol data\n    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 uw/clw\n    \n    Parameters\n    ----------\n    infile:\n        (input) SIR-C SLC or MLC quad-pol compressed data\n    outfile:\n        (output) complex floating point data\n    samples:\n        number of polarimetric samples per input line (10 bytes/sample)\n    parameter:\n        polarimetric parameter to extract from SLC or MLC product:\n            * 0:  SLC total power\n            * 1:  SLC-HH\n            * 2:  SLC-HV\n            * 3:  SLC-VH\n            * 4:  SLC-VV\n            * 5:  MLC total power\n            * 6:  MLC-HVHV\\\\*\n            * 7:  MLC-VVVV\\\\*\n            * 8:  MLC-HHHH\\\\*\n            * 9:  MLC-HHHV\\\\*\n            * 10: MLC-HHVV\\\\*\n            * 11: MLC-HVVV\\\\*\n    \n    loff:\n        offset to starting line (enter - for default: 0)\n    nlines:\n        number of lines to copy (enter - or 0 for default: entire file)\n    logpath: str or None\n        
a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/dcomp_sirc_quad', infile, outfile, samples, parameter, loff, nlines]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef DELFT_vec2(SLC_par, DELFT_dir, nstate='-', interval='-', ODR='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract and interpolate DELFT ERS-1, ERS-2, and ENVISAT state vectors\n    | Copyright 2023, Gamma Remote Sensing, v2.7 19-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) ISP image parameter file\n    DELFT_dir:\n        directory containing Delft orbit arclist and ODR files for ERS-1, ERS-2 or ENVISAT\n            * NOTE: enter . for current directory\n    \n    nstate:\n        number of state vectors to generate (enter - for default, >= 15)\n    interval:\n        time interval between state vectors in the ISP image parameter file (s) (enter - for default: 10.0)\n    ODR:\n        ODR file to use (include path) rather than ODR file determined from the Delft orbit arclist (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/DELFT_vec2', SLC_par, DELFT_dir, nstate, interval, ODR]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef doppler_2d_SLC(SLC, SLC_par, dop2d, loff='-', blsz='-', nbl='-', a2_flg='-', b0_flg='-', b1_flg='-', c0_flg='-', namb='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | 2-D Doppler centroid trend estimation from SLC data\n    | Copyright 2025, Gamma 
Remote Sensing, v1.3 14-May-2025 clw/cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC image (SCOMPLEX or FCOMPLEX format)\n    SLC_par:\n        (input) SLC parameter file\n    dop2d:\n        (output) estimated doppler centroid as a function of range for each block (text format) (enter - for none)\n    loff:\n        number of lines offset (enter - for default: 0)\n    blsz:\n        block size lines, minimum: 256 (enter - for default: 2048)\n    nbl:\n        number of blocks (enter - for default: calculated automatically)\n    a2_flg:\n        fit a2 for second derivative of the Doppler centroid w.r.t.range (Hz/m/m) (enter - for default)\n            * 0: no (default)\n            * 1: yes\n    \n    b0_flg:\n        fit b0 for first derivative of the Doppler centroid w.r.t. along-track time (Hz/sec) (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    b1_flg:\n        fit b1 for along-track rate of the change in slope of Doppler w.r.t. range (Hz/sec/m)(enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    c0_flg:\n        fit c0 for second derivative of the Doppler centroid w.r.t. 
along-track time (Hz/sec/sec) (enter - for default)\n            * 0: no (default)\n            * 1: yes\n    \n    namb:\n        user defined number of Doppler ambiguities to add to the Doppler function (enter - for default: 0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/doppler_2d_SLC', SLC, SLC_par, dop2d, loff, blsz, nbl, a2_flg, b0_flg, b1_flg, c0_flg, namb]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef DORIS_vec(SLC_par, DOR, nstate='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract ENVISAT DORIS state vectors and write to an ISP image parameter file\n    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input/output)ISP SLC/MLI image parameter file\n    DOR:\n        (input) ASAR DORIS data file (DOR_VOR_AXVF)\n    nstate:\n        number of state vectors to extract (enter - for default: 11)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/DORIS_vec', SLC_par, DOR, nstate]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef error_stat(d1, d2, width, dtype, roff, loff, nr, nl, report, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate statistics for two data files and their difference (FLOAT or FCOMPLEX)\n    | Copyright 2017, Gamma Remote Sensing, v1.2 clw 7-Jan-2016\n    \n    Parameters\n    ----------\n    d1:\n        (input) data file 1\n    d2:\n        (input) 
data file 2\n    width:\n        image line width (samples/line)\n    dtype:\n        data type for d1 and d2:\n            * 0: FLOAT\n            * 1: FCOMPLEX\n    \n    roff:\n        sample offset to region start (enter - for default: 0)\n    loff:\n        line offset to region start (enter - for default: 0)\n    nr:\n        region width (samples, enter - for default: width - roff)\n    nl:\n        number of lines in the region (enter - for default: data_lines - loff)\n    report:\n        output text file (keyword:value format)\n            keywords: data_1, data_2, d1_mean, d2_mean, d1_stddev, d2_stddev, root_mean_square_error, normalized_mean_square_error,\n            cross_correlation_coefficient, cross_correlation_angle, total_samples, non_zero_samples, valid_fraction\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/error_stat', d1, d2, width, dtype, roff, loff, nr, nl, report]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef fill_gaps(data_in, width, data_out, dtype='-', method='-', max_dist='-', bp_flag='-', win='-', ds_method='-', ds_size='-', ds_data='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Fill gaps in 2D raster file\n    | Copyright 2023, Gamma Remote Sensing, v2.4 18-Apr-2023 cm\n    \n    Parameters\n    ----------\n    data_in:\n        (input) input data file (FLOAT / FCOMPLEX)\n    width:\n        width of input data\n    data_out:\n        (output) output data file (FLOAT / FCOMPLEX)\n    dtype:\n        input and output data type (enter - for default)\n            * 0: FLOAT (default)\n            * 1: FCOMPLEX\n    \n    method:\n        method flag (enter - for default: 4)\n            * 0: Laplace interpolation and linear 
extrapolation - least squares solution\n            * 1: Laplace interpolation and linear extrapolation - smaller system of linear equations than in method #0 in case of few missing values - least squares solution\n            * 2: Laplace interpolation and linear extrapolation - solves a direct linear system of equations for the missing values (not a least squares solution)\n            * 3: biharmonic interpolation - implementation similar to method #1 - least squares solution\n            * 4: spring analogy: assumes springs (with a nominal length of zero) connect each node with every neighbor - least squares solution (default)\n            * 5: average of the 8 nearest neighbors - this method solves a direct linear system for the missing values (not a least squares solution)\n            * NOTE: small gaps: use method #0, #1 or #3 - large gaps: use method #2, #4 or #5 - most demanding: method #3\n    \n    max_dist:\n        maximum interpolation / extrapolation distance in pixels (enter - or 0 for default: unlimited)\n    bp_flag:\n        perform block processing (enter - for default: 0)\n            * 0: no block processing (default)\n            * 1: block processing (faster, avoid overflow, however might be slightly less accurate)\n            * NOTE: when block processing is selected, a two-step process is carried out: 1: solving the downsampled array (coarse processing), 2: block processing\n    \n    win:\n        block size (pixels, 10 < win < 1000, enter - for default: 100)\n    ds_method:\n        method flag (0 - 5, same choices as for [method] option) (enter - for default: same as [method])\n            * NOTE: for an input containing large gaps, method #2, #4 or #5 may yield more appropriate results.\n    \n    ds_size:\n        maximum size of downsampled data (for both width and height) (pixels, ds_size > 10, enter - for default: 400)\n    ds_data:\n        (output) write intermediate data after solving the downsampled array (FLOAT / FCOMPLEX)\n 
   logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/fill_gaps', data_in, width, data_out, dtype, method, max_dist, bp_flag, win, ds_method, ds_size, ds_data]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef fspf(data_in, data_out, width, dtype='-', r_max='-', spf_type='-', MLI_par='-', interp_mode='-', order='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP fspf: Fast spatial filter for 2D data\n    | Copyright 2025, Gamma Remote Sensing, v2.0 9-Apr-2025 of/clw/uw/cm\n    \n    Parameters\n    ----------\n    data_in:\n        (input) input image data\n    data_out:\n        (output) spatially filtered image data\n    width:\n        number of samples/row\n    dtype:\n        data type (enter - for default):\n            * 0: FCOMPLEX\n            * 1: SCOMPLEX\n            * 2: FLOAT (default)\n    \n    r_max:\n        maximum filter radius (range samples) (enter - for default: 64)\n    spf_type:\n        spatial filter type (enter - for default):\n            * 0: uniform average (default for FCOMPLEX and SCOMPLEX)\n            * 1: triangular weighted average: 1 - (r/r_max)\n            * 2: quadratic weighted average: 1 - (r/r_max)^2\n            * 3: Gaussian weighted average: exp(-2.\\\\*(r^2/r_max^2))\n            * 4: linear least-squares (default for FLOAT data)\n            * 5: median\n    \n    MLI_par:\n        MLI or SLC parameter file with the same number of looks as the input image, required for GPRI data (enter - for none)\n    interp_mode:\n        interpolation method for resampling the data to the original size after filtering\n            * 0: bicubic spline (default)\n            * 1: bicubic spline sqrt(x)\n            * 2: B-spline 
interpolation (default B-spline degree: 3)\n            * 3: B-spline interpolation sqrt(x) (default B-spline degree: 3)\n    \n    order:\n        B-Spline interpolation degree (2->9) (enter - default: 3)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/fspf', data_in, data_out, width, dtype, r_max, spf_type, MLI_par, interp_mode, order]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef gcp_phase(unw, OFF_par, gcp, gcp_ph, win_sz='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract unwrapped phase at GCP locations\n    | Copyright 2023, Gamma Remote Sensing, v1.6 19-Apr-2023 clw\n    \n    Parameters\n    ----------\n    unw:\n        (input) unwrapped interferometric phase\n    OFF_par:\n        (input) ISP interferogram/offset parameter file\n    gcp:\n        (input) ground control point data (text format)\n    gcp_ph:\n        (output) ground control point data + extracted unwrapped phase (text)\n    win_sz:\n        window size for averaging phase for each GCP, must be odd (enter - for default: 1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/gcp_phase', unw, OFF_par, gcp, gcp_ph, win_sz]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef grasses(int, flag, unw, width, xmin='-', xmax='-', ymin='-', ymax='-', xinit='-', yinit='-', init_ph='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Phase unwrapping by region growing\n    | Copyright 2023, Gamma 
Remote Sensing, v4.4 19-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    int:\n        (input) interferogram filename\n    flag:\n        (input) unwrapping flag filename\n    unw:\n        (output) unwrapped phase filename\n    width:\n        number of samples/row\n    xmin:\n        starting range pixel offset (enter - for default: 0)\n    xmax:\n        last range pixel offset (enter - for default: width-1)\n    ymin:\n        starting azimuth row offset, relative to start (enter - for default: 0)\n    ymax:\n        last azimuth row offset, relative to start (enter - for default: nlines-1)\n    xinit:\n        starting range pixel for unwrapping (enter - for default: width/2)\n    yinit:\n        starting row to unwrap (enter - for default: height/2)\n    init_ph:\n        flag to set phase at starting point to 0.0 (enter - for default)\n            * 0: not set to 0.0 (default)\n            * 1: set to 0.0\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/grasses', int, flag, unw, width, xmin, xmax, ymin, ymax, xinit, yinit, init_ph]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef GRD_to_SR(GRD_par, MLI_par, OFF_par, in_file, out_file, rlks='-', azlks='-', interp_mode='-', sr_rsp='-', sr_azsp='-', degree='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Conversion to slant range for ISP MLI and INSAR ground range data of type FLOAT\n    | Copyright 2023, Gamma Remote Sensing, v2.5 18-Apr-2023 uw/clw/cm\n    \n    Parameters\n    ----------\n    GRD_par:\n        (input) SLC parameter file of output ground range image\n    MLI_par:\n        (input/output) MLI ISP image parameter file for slant range image\n            * NOTE: delete an existing 
MLI parameter file to recalculate the output MLI parameters\n    \n    OFF_par:\n        (input) ISP offset/interferogram parameter file of input image (enter - image in MLI geometry)\n    in_file:\n        (input) ground range image (FLOAT)\n    out_file:\n        (output) slant range image (FLOAT)\n    rlks:\n        multi-looking in range (prior to resampling, enter - for default: 1)\n    azlks:\n        multi-looking in azimuth (prior to resampling, enter - for default: 1)\n    interp_mode:\n        interpolation mode (enter - for default)\n            * 0: nearest-neighbor\n            * 1: bicubic spline\n            * 2: bicubic spline log(x)\n            * 3: bicubic spline sqrt(x)\n            * 4: B-spline interpolation (default B-spline degree: 3)\n            * 5: B-spline interpolation sqrt(x) (default) (default B-spline degree: 3)\n            * NOTE: log and sqrt interpolation modes should only be used with non-negative data!\n    \n    sr_rsp:\n        output image slant range sample spacing (m) (enter - for default: c/(2\\\\*adc_sampling_rate)\n    sr_azsp:\n        output image azimuth sample spacing (m) (enter - for default: (input image azimuth spacing) \\\\* azlks)\n    degree:\n        B-spline degree (2->9) (enter - for default: 3)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/GRD_to_SR', GRD_par, MLI_par, OFF_par, in_file, out_file, rlks, azlks, interp_mode, sr_rsp, sr_azsp, degree]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef hgt_map(unw, SLC_par, OFF_par, baseline, hgt, gr, ph_flag='-', loff='-', nlines='-', SLC2R_par='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Interferometric height/ground range estimation vs. 
slant range\n    | Copyright 2023, Gamma Remote Sensing, v5.3 clw/uw 18-Apr-2023\n    \n    Parameters\n    ----------\n    unw:\n        (input) unwrapped interferometric phase\n    SLC_par:\n        (input) ISP parameter file for the reference SLC\n    OFF_par:\n        (input) ISP offset/interferogram processing parameters\n    baseline:\n        (input) baseline parameter file\n    hgt:\n        (output) height file (in slant range geometry) relative to the WGS-84 ellipsoid\n    gr:\n        (output) cross-track ground ranges on the WGS-84 ellipsoid (in slant range geometry)\n    ph_flag:\n        restore phase slope flag (enter - for default)\n            * 0: no phase change\n            * 1: add back phase ramp (default)\n    \n    loff:\n        offset to starting line (enter - for default: 0)\n    nlines:\n        number of lines to calculate (enter - for default: to end of file)\n    SLC2R_par:\n        (input) parameter file of resampled SLC, required if SLC2 frequency differs from SLC1 (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/hgt_map', unw, SLC_par, OFF_par, baseline, hgt, gr, ph_flag, loff, nlines, SLC2R_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef image_stat(image, width, roff='-', loff='-', nr='-', nl='-', report='-', median_flg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate mean, standard deviation, number of non-zero values, min, max and median for a rectangular image region (FLOAT format)\n    | Copyright 2025, Gamma Remote Sensing, v1.6 27-May-2025 clw/cm\n    \n    Parameters\n    ----------\n    image:\n        (input) image data file (FLOAT)\n    width:\n        image line width (samples/line)\n 
   roff:\n        sample offset to region start (enter - for default: 0)\n    loff:\n        line offset to region start (enter - for default: 0)\n    nr:\n        region width (samples, enter - for default: width - roff)\n    nl:\n        number of lines in the region (enter - for default: image_lines - loff)\n    report:\n        output text file (keyword:value format, enter - for none)\n            keywords: file, mean, stdev, total_samples, non_zero_samples, fraction_valid, min, max, median\n    median_flg:\n        median calculation flag (enter - for default)\n            * 0: do not calculate median\n            * 1: calculate median (default, memory use may be large)\n            * NOTE: only the non-zero samples are considered in the statistical values\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/image_stat', image, width, roff, loff, nr, nl, report, median_flg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef init_offset(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, rlks='-', azlks='-', rpos='-', azpos='-', offr='-', offaz='-', thres='-', rwin='-', azwin='-', cflag='-', deramp='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Determine initial offset between SLC images using correlation of image intensity\n    | Copyright 2023, Gamma Remote Sensing, v3.3 clw/cm 18-Apr-2023\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2:\n        (input) single-look complex image 2\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    rlks:\n        number 
of range looks (enter - for default: 1)\n    azlks:\n        number of azimuth looks (enter - for default: 1)\n    rpos:\n        center of patch in range (samples) (enter - for default: image center)\n    azpos:\n        center of patch in azimuth (lines) (enter - for default: image center)\n    offr:\n        initial range offset (samples) (enter - for default: 0)\n    offaz:\n        initial azimuth offset (lines) (enter - for default: 0)\n    thres:\n        cross-correlation threshold (enter - for default: 0.150)\n    rwin:\n        range window size (enter - for default: 512)\n    azwin:\n        azimuth window size (enter - for default: 512)\n    cflag:\n        copy offsets to the range and azimuth offset polynomials in the OFF_par (enter - for default)\n            * 0: do not copy\n            * 1: copy constant range and azimuth offset (default)\n    \n    deramp:\n        deramp SLC phase flag (enter - for default)\n            * 0: no deramp (Doppler centroid close to 0) (default)\n            * 1: deramp SLC phase\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/init_offset', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, rlks, azlks, rpos, azpos, offr, offaz, thres, rwin, azwin, cflag, deramp]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef init_offset_orbit(SLC1_par, SLC2_par, OFF_par, rpos='-', azpos='-', cflag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Initial SLC image offset estimation from orbit state-vectors and image parameters\n    | Copyright 2020, Gamma Remote Sensing, v1.9 18-Apr-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC1_par:\n        (input) SLC1 parameter file\n    SLC2_par:\n        (input) SLC2 parameter 
file\n    OFF_par:\n        (input/output) ISP/offset parameter file\n    rpos:\n        range position for offset estimation (enter - for default: center of SLC1)\n    azpos:\n        azimuth position for offset estimation (enter - for default: center of SLC1)\n    cflag:\n        copy offsets to the range and azimuth offset polynomials in the OFF_par (enter - for default)\n            * 0: do not copy\n            * 1: copy constant range and azimuth offset (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/init_offset_orbit', SLC1_par, SLC2_par, OFF_par, rpos, azpos, cflag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef interf_SLC(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, MLI1, MLI2, interf, rlks='-', azlks='-', loff='-', nltot='-', rfilt='-', azfilt='-', s_off='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Interferogram generation using a pair of SLC images\n    | Copyright 2023, Gamma Remote Sensing, v5.0 clw/uw 18-Apr-2023\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2:\n        (input) single-look complex image 2\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    MLI1:\n        (output) multi-look intensity image 1\n    MLI2:\n        (output) multi-look intensity image 2\n    interf:\n        interferogram from SLC1 and SLC2\n    rlks:\n        number of interferogram range looks (enter - for default: 2)\n    azlks:\n        number of interferogram azimuth looks (enter - for default: 10)\n    loff:\n        offset to starting 
line of interferogram (relative to start of SLC1) (enter - for default: 0)\n    nltot:\n        number of SLC lines to process (enter - or 0 for default: to end of file)\n    rfilt:\n        range common band filtering flag (enter - for default)\n            * 0: OFF\n            * 1: ON (default)\n    \n    azfilt:\n        azimuth common band filtering flag (enter - for default)\n            * 0: OFF\n            * 1: ON (default)\n    \n    s_off:\n        offset to the nominal range spectral shift (frac. of range sampling freq.) (enter - for default: 0.0)\n            * NOTE: enter - as filename to avoid creation of corresponding output file\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/interf_SLC', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, MLI1, MLI2, interf, rlks, azlks, loff, nltot, rfilt, azfilt, s_off]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef interp_ad(data_in, data_out, width, r_max='-', np_min='-', np_max='-', w_mode='-', dtype='-', cp_data='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Weighted interpolation of gaps in 2D data using an adaptive smoothing window\n    | Copyright 2018, Gamma Remote Sensing, v2.1 13-Jun-2018 clw/uw\n    \n    Parameters\n    ----------\n    data_in:\n        (input) data with gaps\n    data_out:\n        (output) data with gaps filled by interpolation\n    width:\n        number of samples/row\n    r_max:\n        maximum interpolation window radius (default(-): 16)\n    np_min:\n        minimum number of points used for the interpolation (default(-): 16)\n    np_max:\n        maximum number of points used for the interpolation (default(-): 16)\n    w_mode:\n        data weighting mode (enter - for 
default):\n            * 0: constant\n            * 1: 1 - (r/r_max)\n            * 2: 1 - (r/r_max)\\\\*\\\\*2  (default)\n            * 3: exp(-2.\\\\*(r\\\\*\\\\*2/r_max\\\\*\\\\*2))\n    \n    dtype:\n        input and output data type:\n            * 0: FCOMPLEX\n            * 1: SCOMPLEX\n            * 2: FLOAT (default)\n            * 3: INT\n            * 4: SHORT\n    \n    cp_data:\n        copy data flag:\n            * 0: do not copy input data values to output\n            * 1: copy input data values to output (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/interp_ad', data_in, data_out, width, r_max, np_min, np_max, w_mode, dtype, cp_data]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef mask_data(data_in, width, data_out, mask, dtype='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Mask float or fcomplex data using an 8-bit SUN/BMP/TIFF format raster image\n    | Copyright 2022, Gamma Remote Sensing, v1.6 8-Nov-2022 clw/cm\n    \n    Parameters\n    ----------\n    data_in:\n        (input) data file (FLOAT or FCOMPLEX format)\n    width:\n        width of input data file\n    data_out:\n        (output) data file, same data format as input\n    mask:\n        (input) mask file, SUN/BMP/TIFF raster format, 8-bits/pixel\n            output data values are set to 0.0 at all locations where the mask is black (0,0,0) or dn = 0\n            * NOTE: mask file must have the same width as the input data file\n    \n    dtype:\n        data format:\n            * 0: FLOAT (default)\n            * 1: FCOMPLEX\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to 
execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/mask_data', data_in, width, data_out, mask, dtype]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef mcf(interf, wgt, mask, unw, width, tri_mode='-', roff='-', loff='-', nr='-', nlines='-', npat_r='-', npat_az='-', ovrlap='-', r_init='-', az_init='-', init_flag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Phase unwrapping using Minimum Cost Flow (MCF) on a triangular mesh\n    | Copyright 2024, Gamma Remote Sensing, v2.9 clw/uw/cm 4-Apr-2024\n    \n    Parameters\n    ----------\n    interf:\n        (input) interferogram (\\\\*.int,\\\\*.diff,\\\\*.flt) (FCOMPLEX)\n    wgt:\n        (input) weight factors (0 -> 1.0, e.g. coherence map) file (FLOAT) (enter - for uniform weights)\n    mask:\n        (input) validity mask (SUN/BMP/TIFF raster format, value 0 -> pixel not used) (enter - if no mask)\n    unw:\n        (output) unwrapped phase image (\\\\*.unw) (FLOAT)\n    width:\n        number of samples/row\n    tri_mode:\n        triangulation mode (enter - for default)\n            * 0: filled triangular mesh\n            * 1: Delaunay triangulation\n            * 2: filled triangular mesh, replacing gaps with noise (default)\n            * 3: filled triangular mesh, replacing gaps and outside boundary with noise\n    \n    roff:\n        offset to starting range of section to unwrap (enter - for default: 0)\n    loff:\n        offset to starting line of section to unwrap (enter - for default: 0)\n    nr:\n        number of range samples of section to unwrap (enter - for default: width - roff)\n    nlines:\n        number of lines of section to unwrap (enter - for default: total number of lines - loff)\n    npat_r:\n        number of patches in range (enter - for default: 1, enter 0 to automatically define number of patches)\n    
npat_az:\n        number of patches in azimuth (enter - for default: 1, enter 0 to automatically define number of patches)\n    ovrlap:\n        overlap between patches in pixels (overlap >= 7, enter - for default: 1024)\n    r_init:\n        phase reference point range offset (enter - for default: center of valid data bounding box)\n    az_init:\n        phase reference point azimuth offset (enter - for default: center of valid data bounding box)\n    init_flag:\n        flag to set phase at reference point (enter - for default)\n            * 0: use initial point phase value (default)\n            * 1: set phase to 0.0 at initial point\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/mcf', interf, wgt, mask, unw, width, tri_mode, roff, loff, nr, nlines, npat_r, npat_az, ovrlap, r_init, az_init, init_flag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef MLI_cat(MLI1, MLI2, MLI1_par, MLI2_par, MLI3, MLI3_par, dtype='-', mflg='-', overlap='-', interp_mode='-', degree='-', extrapol='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Concatenate two MLI images using B-spline interpolation\n    | Copyright 2023, Gamma Remote Sensing, v2.0 18-Apr-2023 awi/cm/clw\n    \n    Parameters\n    ----------\n    MLI1:\n        (input) MLI1 image (single-look)\n    MLI2:\n        (input) MLI2 image to be appended to MLI1\n    MLI1_par:\n        (input) MLI1 ISP image parameter file\n    MLI2_par:\n        (input) MLI2 ISP image parameter file\n    MLI3:\n        (output) concatenated MLI image\n    MLI3_par:\n        (output) ISP image parameter file for concatenated image\n    dtype:\n        input/output data type (enter - for default)\n            * 0: FLOAT (default)\n  
          * 1: FCOMPLEX\n            * NOTE: FCOMPLEX is for differential interferograms\n    \n    mflg:\n        mosaicking option flag (enter - for default)\n            * 0: in overlapping areas, use MLI2 data to fill MLI1 empty areas (default)\n            * 1: in overlapping areas, do not use MLI2 data to fill MLI1 empty areas\n    \n    overlap:\n        number of pixels at the edge of MLI1 valid areas to replace by MLI2 data (only if mflg=0, enter - for default: 0)\n    interp_mode:\n        interpolation mode in case of different geometries (enter - for default)\n            * 0: B-spline interpolation (default for FCOMPLEX)\n            * 1: B-spline interpolation sqrt(x) (default for FLOAT)\n            * NOTE: sqrt interpolation mode should only be used with non-negative data!\n    \n    degree:\n        B-spline degree (2->9) (enter - default: 4)\n    extrapol:\n        extrapolation flag (enter - for default)\n            * 0: do not extrapolate (default)\n            * 1: extrapolate last line if needed\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/MLI_cat', MLI1, MLI2, MLI1_par, MLI2_par, MLI3, MLI3_par, dtype, mflg, overlap, interp_mode, degree, extrapol]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef MLI_copy(MLI_in, MLI_in_par, MLI_out, MLI_out_par, roff='-', nr='-', loff='-', nl='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Copy MLI data file with options for segment extraction\n    | Copyright 2019, Gamma Remote Sensing, v4.9 15-Oct-2019 uw/clw/cm\n    \n    Parameters\n    ----------\n    MLI_in:\n        (input) multi-look intensity image (float format)\n    MLI_in_par:\n        (input) ISP image parameter file for input MLI\n  
  MLI_out:\n        (output) selected MLI section (float format)\n    MLI_out_par:\n        (output) ISP image parameter file for output MLI\n    roff:\n        offset to starting range sample (enter - for default: 0)\n    nr:\n        number of range samples (enter - for default: to end of line\n    loff:\n        offset to starting line (enter - for default: 0)\n    nl:\n        number of lines to copy (enter - for default: to end of file)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/MLI_copy', MLI_in, MLI_in_par, MLI_out, MLI_out_par, roff, nr, loff, nl]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef mosaic_WB(data_tab, dtype, data_out, data_par_out, sc_flg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP: Program GAMMA_SOFTWARE-20250625/ISP/bin/mosaic_WB.c\n    | Copyright 2018, Gamma Remote Sensing, v1.3 26-Apr-2018 clw/cm\n    | Mosaic Wide-Beam ScanSAR data processed by the MSP\n    \n    Parameters\n    ----------\n    data_tab:\n        (input) 2 column list of data  and ISP image parameter files for the beams in the mosaic (text)\n    dtype:\n        (input) input data type:\n            * 0: FLOAT\n            * 1: FCOMPLEX\n    \n    data_out:\n        (output) output image mosaic\n    data_par_out:\n        (output) ISP image parameter file for output image mosaic\n    sc_flg:\n        intensity scaling flag:\n            * 0: do not scale different beam data values\n            * 1: use overlap regions to scale beam intensities (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to 
write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/mosaic_WB', data_tab, dtype, data_out, data_par_out, sc_flg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef multi_look(SLC, SLC_par, MLI, MLI_par, rlks, azlks, loff='-', nlines='-', scale='-', exp='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate a multi-look intensity (MLI) image from an SLC image\n    | Copyright 2022, Gamma Remote Sensing, v4.7 8-Aug-2022 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) single-look complex image (SCOMPLEX or FCOMPLEX)\n    SLC_par:\n        (input) SLC ISP image parameter file\n    MLI:\n        (output) multi-look intensity image (FLOAT)\n    MLI_par:\n        (output) MLI ISP image parameter file\n    rlks:\n        number of range looks (INT)\n    azlks:\n        number of azimuth looks (INT)\n    loff:\n        offset to starting line (enter - for default: 0)\n    nlines:\n        number of SLC lines to process (enter - for default: entire file)\n    scale:\n        scale factor for output MLI (enter - for default: 1.0)\n    exp:\n        exponent for the output MLI (enter - for default: 1.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/multi_look', SLC, SLC_par, MLI, MLI_par, rlks, azlks, loff, nlines, scale, exp]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef multi_look2(SLC, SLC_par, MLI, MLI_par, r_dec, az_dec, rwin='-', azwin='-', wflg='-', n_ovr='-', lanczos='-', beta='-', scale='-', exp='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate an MLI image from an SLC with optional oversampling and separate multilooking 
and decimation factors\n    | Copyright 2024, Gamma Remote Sensing, v1.9 10-Jun-2024 clw/cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) single-look complex image (SCOMPLEX or FCOMPLEX)\n    SLC_par:\n        (input) SLC image parameter file\n    MLI:\n        (output) multi-look intensity image (FLOAT)\n    MLI_par:\n        (output) MLI image parameter file\n    r_dec:\n        range decimation factor (int)\n    az_dec:\n        azimuth decimation factor (int)\n    rwin:\n        averaging window width (int)  (enter - for default: r_dec)\n    azwin:\n        averaging window height (int) (enter - for default: az_dec)\n    wflg:\n        window weighting function (enter - for default):\n            * 0: rectangular (default)\n            * 1: Kaiser\n            * 2: circular Gaussian\n    \n    n_ovr:\n        oversampling factor 1 -> 2 (enter - for default: 1)\n    lanczos:\n        Lanczos interpolator order 5 -> 9 (enter - for default: 7)\n    beta:\n        Gaussian or Kaiser window parameter (enter - for default: 2.0)\n    scale:\n        scale factor for output MLI (enter - for default: 1.0)\n    exp:\n        exponent for the output MLI (enter - for default: 1.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/multi_look2', SLC, SLC_par, MLI, MLI_par, r_dec, az_dec, rwin, azwin, wflg, n_ovr, lanczos, beta, scale, exp]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef multi_look_MLI(MLI_in, MLI_in_par, MLI_out, MLI_out_par, rlks, azlks, loff='-', nlines='-', scale='-', e_flag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Multilooking (averaging and decimation) of MLI images\n    | Copyright 2019, Gamma Remote Sensing, v1.9 
29-Oct-2019 clw/cm\n    \n    Parameters\n    ----------\n    MLI_in:\n        (input) multi-look intensity image (MLI) file (float)\n    MLI_in_par:\n        (input) MLI parameter file\n    MLI_out:\n        (output) multi-looked MLI image (float)\n    MLI_out_par:\n        (output) MLI parameter file for output MLI\n    rlks:\n        range looks for multi-looking\n    azlks:\n        azimuth looks for multi-looking\n    loff:\n        offset to starting line (enter - for default: 0)\n    nlines:\n        number of input MLI lines to process (enter - for default: entire file)\n    scale:\n        scale factor for output MLI (enter - for default: 1.0)\n    e_flag:\n        extent flag (enter - for default)\n            * 0: only permit pixels with the full number of looks (default)\n            * 1: permit pixels without the full number of looks\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/multi_look_MLI', MLI_in, MLI_in_par, MLI_out, MLI_out_par, rlks, azlks, loff, nlines, scale, e_flag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef multi_look_ScanSAR(SLC_tab, MLI, MLI_par, rlks, azlks, bflg='-', SLCR_tab='-', scale='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate MLI mosaic from ScanSAR SLC burst data (Sentinel-1, TerraSAR-X, RCM...)\n    | Copyright 2023, Gamma Remote Sensing v4.6 30-Nov-2023 awi/clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range\n            SLC_tab line entries:   SLC   SLC_par  TOPS_par\n    MLI:\n        (output) mosaicked MLI image (non-overlapping burst windows)\n    MLI_par:\n        (output) MLI image 
parameter file\n    rlks:\n        number of range looks\n    azlks:\n        number of azimuth looks\n    bflg:\n        burst window calculation flag (enter - for default):\n            * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)\n            * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks\n    \n    SLCR_tab:\n        (input) 3 column list of the reference scene, swaths are listed in order from near to far range, (enter - for default: none)\n            SLCR_tab line entries: SLC  SLC_par  TOPS_par\n            When generating an MLI mosaic from resampled ScanSAR SLC data, the SLC_tab of the reference scene must be provided\n    scale:\n        scale factor for output MLI (enter - for default: calculate from calibration gain in SLC parameter file)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/multi_look_ScanSAR', SLC_tab, MLI, MLI_par, rlks, azlks, bflg, SLCR_tab, scale]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef multi_real(data_in, OFF_par_in, data_out, OFF_par_out, rlks='-', azlks='-', loff='-', nlines='-', roff='-', nsamp='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate multi-look averaged or interpolated 2D image (float data)\n    | Copyright 2023, Gamma Remote Sensing, v2.7 19-Apr-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    data_in:\n        (input) input float image file\n    OFF_par_in:\n        (input) interferogram/offset parameter file for input image\n    data_out:\n        (output) output multi-look or interpolated float data file\n    OFF_par_out:\n        (input/output) interferogram/offset 
parameter file for output, if already existing, used as input\n    rlks:\n        number of range looks, values < -1, interpreted as an image oversampling factor (enter - for default: 1)\n    azlks:\n        number azimuth looks,  values < -1, interpreted as an image oversampling factor (enter - for default: 1)\n    loff:\n        line offset to starting line (enter - for default: 0)\n    nlines:\n        number of lines (enter - for default: to end of file)\n    roff:\n        offset to starting range sample (enter - for default: 0)\n    nsamp:\n        number of range samples to extract (enter - for default: to end of line)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/multi_real', data_in, OFF_par_in, data_out, OFF_par_out, rlks, azlks, loff, nlines, roff, nsamp]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef multi_SLC_WSS(SLC, SLC_par, MLI, MLI_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate multi-look intensity image (MLI) from a ASAR Wide-Swath SLC\n    | Copyright 2023, Gamma Remote Sensing v1.3 18-Apr-2023 clw/awi\n    \n    Parameters\n    ----------\n    SLC:\n        (input) ASAR Wide-Swath SLC image\n    SLC_par:\n        (input) ASAR Wide-Swath SLC image parameter file\n    MLI:\n        (output) multi-look intensity image\n    MLI_par:\n        (output) MLI image parameter file\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/multi_SLC_WSS', SLC, SLC_par, MLI, MLI_par]\n    process(cmd, 
logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef neutron(intensity, flag, width, n_thres, ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate phase unwrapping neutrons using image intensity\n    | Copyright 2023, Gamma Remote Sensing, v2.4 19-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    intensity:\n        (input) image intensity \n    flag:\n        (input) phase unwrapping flag file\n    width:\n        number of samples/row\n    n_thres:\n        neutron threshold, multiples of the average intensity (enter - for default: 6.0)\n    ymin:\n        offset to starting azimuth row (enter - for default: 0)\n    ymax:\n        offset to last azimuth row (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/neutron', intensity, flag, width, n_thres, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_add(OFF_par1, OFF_par2, OFF_par3, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Add range and azimuth offset polynomial coefficients\n    | Copyright 2008, Gamma Remote Sensing, v1.1 12-Feb-2008 clw\n    \n    Parameters\n    ----------\n    OFF_par1:\n        (input) ISP offset/interferogram parameter file\n    OFF_par2:\n        (input) ISP offset/interferogram parameter file\n    OFF_par3:\n        (output) ISP offset/interferogram parameter file with sums of the\n            range and azimuth offset polynomials in OFF_par1 and OFF_par2\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands 
to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_add', OFF_par1, OFF_par2, OFF_par3]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_fit(offs, ccp, OFF_par, coffs='-', coffsets='-', thres='-', npoly='-', interact_mode='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Range and azimuth offset polynomial estimation\n    | Copyright 2023, Gamma Remote Sensing, v3.9 18-Apr-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    offs:\n        (input) range and azimuth offset estimates for each patch (FCOMPLEX)\n    ccp:\n        (input) cross-correlation or SNR of each patch (FLOAT)\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    coffs:\n        (output) culled range and azimuth offset estimates (FCOMPLEX, enter - for none)\n    coffsets:\n        (output) culled offset estimates and cross-correlation values (text format, enter - for none)\n    thres:\n        cross-correlation threshold (enter - for default from OFF_par)\n    npoly:\n        number of model polynomial parameters (enter - for default, 1, 3, 4, 6, default: 4)\n    interact_mode:\n        interactive culling of input data (enter - for default)\n            * 0: off (default)\n            * 1: on\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_fit', offs, ccp, OFF_par, coffs, coffsets, thres, npoly, interact_mode]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_pwr(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin='-', azwin='-', offsets='-', n_ovr='-', nr='-', naz='-', thres='-', lanczos='-', bw_frac='-', deramp='-', int_filt='-', pflag='-', pltflg='-', ccs='-', 
logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Offset estimation between SLC images using intensity cross-correlation\n    | Copyright 2023, Gamma Remote Sensing, v5.8 clw/cm 18-Apr-2023\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2:\n        (input) single-look complex image 2\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    offs:\n        (output) offset estimates in range and azimuth (FCOMPLEX)\n    ccp:\n        (output) cross-correlation of each patch (0.0->1.0) (FLOAT)\n    rwin:\n        range patch size (range pixels, enter - for default from offset parameter file)\n    azwin:\n        azimuth patch size (azimuth lines, enter - for default from offset parameter file)\n    offsets:\n        (output) range and azimuth offsets and cross-correlation data in text format, enter - for no output\n    n_ovr:\n        SLC oversampling factor (integer 2\\\\*\\\\*N (1,2,4), enter - for default: 2)\n    nr:\n        number of offset estimates in range direction (enter - for default from offset parameter file)\n    naz:\n        number of offset estimates in azimuth direction (enter - for default from offset parameter file)\n    thres:\n        cross-correlation threshold (0.0->1.0) (enter - for default from offset parameter file)\n    lanczos:\n        Lanczos interpolator order 5 -> 9 (enter - for default: 5)\n    bw_frac:\n        bandwidth fraction of low-pass filter on complex data (0.0->1.0) (enter - for default: 1.0)\n    deramp:\n        deramp SLC phase flag (enter - for default)\n            * 0: no deramp (Doppler centroid close to 0) (default)\n            * 1: deramp SLC phase\n    \n    int_filt:\n        intensity low-pass filter flag (enter - for default)\n            * 0: no filter\n            * 1: low-pass filter of 
intensity data, highly recommended when no oversampling used (default)\n    \n    pflag:\n        print flag (enter - for default)\n            * 0: print offset summary (default)\n            * 1: print all offset data\n    \n    pltflg:\n        plotting flag (enter - for default)\n            * 0: none (default)\n            * 1: screen output\n            * 2: screen output and PNG format plots\n            * 3: output plots in PDF format\n    \n    ccs:\n        (output) cross-correlation standard deviation of each patch (FLOAT) (enter - for none)\n            * NOTE: ScanSAR and TOPS data need to be previously deramped\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin, azwin, offsets, n_ovr, nr, naz, thres, lanczos, bw_frac, deramp, int_filt, pflag, pltflg, ccs]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_pwr_tracking(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin='-', azwin='-', offsets='-', n_ovr='-', thres='-', rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', lanczos='-', bw_frac='-', deramp='-', int_filt='-', pflag='-', pltflg='-', ccs='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Offset tracking between SLC images using intensity cross-correlation\n    | Copyright 2023, Gamma Remote Sensing, v6.4 clw/cm 18-Apr-2023\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2:\n        (input) single-look complex image 2\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP 
offset/interferogram parameter file\n    offs:\n        (output) offset estimates in range and azimuth (FCOMPLEX)\n    ccp:\n        (output) cross-correlation of each patch (0.0->1.0) (FLOAT)\n    rwin:\n        range patch size (range pixels, enter - for default from offset parameter file)\n    azwin:\n        azimuth patch size (azimuth lines, enter - for default from offset parameter file)\n    offsets:\n        (output) range and azimuth offsets and cross-correlation data in text format, enter - for no output\n    n_ovr:\n        SLC oversampling factor (integer 2\\\\*\\\\*N (1,2,4), enter - for default: 2)\n    thres:\n        cross-correlation threshold (0.0->1.0) (enter - for default from offset parameter file)\n    rstep:\n        step in range pixels (enter - for default: rwin/2)\n    azstep:\n        step in azimuth pixels (enter - for default: azwin/2)\n    rstart:\n        offset to starting range pixel (enter - for default: 0)\n    rstop:\n        offset to ending range pixel (enter - for default: nr-1)\n    azstart:\n        offset to starting azimuth line (enter - for default: 0)\n    azstop:\n        offset to ending azimuth line (enter - for default: nlines-1)\n    lanczos:\n        Lanczos interpolator order 5 -> 9 (enter - for default: 5)\n    bw_frac:\n        bandwidth fraction of low-pass filter on complex data (0.0->1.0) (enter - for default: 1.0)\n    deramp:\n        deramp SLC phase flag (enter - for default)\n            * 0: no deramp (Doppler centroid close to 0) (default)\n            * 1: deramp SLC phase\n    \n    int_filt:\n        intensity low-pass filter flag (enter - for default)\n            * 0: no filter\n            * 1: low-pass filter of intensity data, highly recommended when no oversampling used (default)\n    \n    pflag:\n        print flag (enter - for default)\n            * 0: print offset summary (default)\n            * 1: print all offset data\n    \n    pltflg:\n        plotting flag (enter - for default)\n    
        * 0: none (default)\n            * 1: screen output\n            * 2: screen output and PNG format plots\n            * 3: output plots in PDF format\n    \n    ccs:\n        (output) cross-correlation standard deviation of each patch (FLOAT) (enter - for none)\n            * NOTE: ScanSAR and TOPS data need to be previously deramped\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr_tracking', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, rwin, azwin, offsets, n_ovr, thres, rstep, azstep, rstart, rstop, azstart, azstop, lanczos, bw_frac, deramp, int_filt, pflag, pltflg, ccs]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_pwr_tracking2(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, OFF_par2='-', offs2='-', rwin='-', azwin='-', offsets='-', n_ovr='-', thres='-', rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', bw_frac='-', deramp='-', int_filt='-', pflag='-', pltflg='-', ccs='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Intensity cross-correlation offset tracking with the initial offset for each patch determined from input offset map\n    | Copyright 2023, Gamma Remote Sensing, v2.1 clw/cm 18-Apr-2023\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2:\n        (input) single-look complex image 2\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    offs:\n        (output) offset estimates in range and azimuth (FCOMPLEX)\n    ccp:\n        (output) cross-correlation of each patch (0.0->1.0) 
(FLOAT)\n    OFF_par2:\n        (input) ISP offset/interferogram parameter file of the offset map to determine initial offsets (enter - for none)\n    offs2:\n        (input) input range and azimuth offset map to determine initial offsets (enter - for none)\n    rwin:\n        range patch size (range pixels, enter - for default from offset parameter file)\n    azwin:\n        azimuth patch size (azimuth lines, enter - for default from offset parameter file)\n    offsets:\n        (output) range and azimuth offsets and cross-correlation data in text format, enter - for no output\n    n_ovr:\n        SLC oversampling factor (integer 2\\\\*\\\\*N (1,2,4), enter - for default: 2)\n    thres:\n        cross-correlation threshold (0.0->1.0) (enter - for default from offset parameter file)\n    rstep:\n        step in range pixels (enter - for default: rwin/2)\n    azstep:\n        step in azimuth pixels (enter - for default: azwin/2)\n    rstart:\n        offset to starting range pixel (enter - for default: 0)\n    rstop:\n        offset to ending range pixel (enter - for default: nr-1)\n    azstart:\n        offset to starting azimuth line (enter - for default: 0)\n    azstop:\n        offset to ending azimuth line (enter - for default: nlines-1)\n    bw_frac:\n        bandwidth fraction of low-pass filter on complex data (0.0->1.0) (enter - for default: 1.0)\n    deramp:\n        deramp SLC phase flag (enter - for default)\n            * 0: no deramp (Doppler centroid close to 0) (default)\n            * 1: deramp SLC phase\n    \n    int_filt:\n        intensity low-pass filter flag (enter - for default)\n            * 0: no filter\n            * 1: low-pass filter of intensity data, highly recommended when no oversampling used (default)\n    \n    pflag:\n        print flag (enter - for default)\n            * 0: print offset summary (default)\n            * 1: print all offset data\n    \n    pltflg:\n        plotting flag (enter - for default)\n            * 0: 
none (default)\n            * 1: screen output\n            * 2: screen output and PNG format plots\n            * 3: output plots in PDF format\n    \n    ccs:\n        (output) cross-correlation standard deviation of each patch (FLOAT) (enter - for none)\n            * NOTE: ScanSAR and TOPS data need to be previously deramped\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr_tracking2', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, ccp, OFF_par2, offs2, rwin, azwin, offsets, n_ovr, thres, rstep, azstep, rstart, rstop, azstart, azstop, bw_frac, deramp, int_filt, pflag, pltflg, ccs]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_pwr_tracking_polygons(SLC_par, OFF_par, rlks, azlks, rwin, azwin, polygons, rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', rb='-', azb='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Offset tracking polygon calculation in MLI coordinates\n    | Copyright 2023, Gamma Remote Sensing, v1.2 18-Apr-2023 cw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) reference SLC ISP image parameter file\n    OFF_par:\n        (input/output) ISP offset/interferogram parameter file\n    rlks:\n        range decimation factor for MLI geometry  (enter - for default: 1)\n    azlks:\n        azimuth decimation factor for the MLI geometry (enter - for default: 1)\n    rwin:\n        range patch size (range pixels, enter - for default from offset parameter file)\n    azwin:\n        azimuth patch size (azimuth lines, enter - for default from offset parameter file)\n    polygons:\n        (output) polygon vertices in text format\n    rstep:\n        step in range pixels (enter - for default: 
rwin/2)\n    azstep:\n        step in azimuth pixels (enter - for default: azwin/2)\n    rstart:\n        offset to starting range pixel (enter - for default: 0)\n    rstop:\n        offset to ending range pixel (enter - for default: nr-1)\n    azstart:\n        offset to starting azimuth line (enter - for default: 0)\n    azstop:\n        offset to ending azimuth line (enter - for default: nlines-1)\n    rb:\n        polygon range border in MLI samples: (enter - for default: 7)\n    azb:\n        polygon azimuth border in MLI lines: (enter - for default: 7)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_pwr_tracking_polygons', SLC_par, OFF_par, rlks, azlks, rwin, azwin, polygons, rstep, azstep, rstart, rstop, azstart, azstop, rb, azb]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_SLC(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rwin='-', azwin='-', offsets='-', n_ovr='-', nr='-', naz='-', thres='-', ISZ='-', pflag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Offsets between SLC images using fringe visibility\n    | Copyright 2023, Gamma Remote Sensing, v3.1 18-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2:\n        (input) single-look complex image 2\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    offs:\n        (output) offset estimates (FCOMPLEX)\n    snr:\n        (output) offset estimation SNR (FLOAT)\n    rwin:\n        search window size (range pixels) (enter - for default from offset parameter 
file)\n    azwin:\n        search window size (azimuth lines) (enter - for default from offset parameter file)\n    offsets:\n        (output) range and azimuth offsets and SNR data in text format, enter - for no output\n    n_ovr:\n        SLC oversampling factor (integer 2\\\\*\\\\*N (1,2,4) enter - for default: 2)\n    nr:\n        number of offset estimates in range direction (enter - for default from offset parameter file)\n    naz:\n        number of offset estimates in azimuth direction (enter - for default from offset parameter file)\n    thres:\n        offset estimation quality threshold (enter - for default from offset parameter file)\n    ISZ:\n        search chip interferogram size (in non-oversampled pixels, enter - for default: 16)\n    pflag:\n        print flag (enter - for default)\n            * 0: print offset summary (default)\n            * 1: print all offset data\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_SLC', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rwin, azwin, offsets, n_ovr, nr, naz, thres, ISZ, pflag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_SLC_tracking(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rsw='-', azsw='-', offsets='-', n_ovr='-', thres='-', rstep='-', azstep='-', rstart='-', rstop='-', azstart='-', azstop='-', ISZ='-', pflag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Offset tracking between SLC images using fringe visibility\n    | Copyright 2023, Gamma Remote Sensing, v3.8 18-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2:\n        (input) single-look complex image 2\n    SLC1_par:\n   
     (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    offs:\n        (output) offset estimates (FCOMPLEX)\n    snr:\n        (output) offset estimation SNR (FLOAT)\n    rsw:\n        range search window size (range pixels) (enter - for default from offset parameter file)\n    azsw:\n        azimuth search window size (azimuth lines) (enter - for default from offset parameter file)\n    offsets:\n        (output) range and azimuth offsets and SNR data in text format, enter - for no output\n    n_ovr:\n        SLC over-sampling factor (integer 2\\\\*\\\\*N (1,2,4) enter - for default: 2)\n    thres:\n        offset estimation quality threshold (enter - for default from offset parameter file)\n    rstep:\n        step in range pixels (enter - for default: rsw/2)\n    azstep:\n        step in azimuth pixels (enter - for default: azsw/2)\n    rstart:\n        starting range pixel (enter - for default: rsw/2)\n    rstop:\n        ending range pixel (enter - for default: nr - rsw/2)\n    azstart:\n        starting azimuth line (enter - for default: azsw/2)\n    azstop:\n        ending azimuth line (enter - for default: nlines - azsw/2)\n    ISZ:\n        search chip interferogram size (in non-oversampled pixels, enter - for default: 16)\n    pflag:\n        print flag (enter - for default)\n            * 0: print offset summary (default)\n            * 1: print all offset data\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_SLC_tracking', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, offs, snr, rsw, azsw, offsets, n_ovr, thres, rstep, azstep, rstart, rstop, azstart, azstop, ISZ, pflag]\n    
process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_sub(offs, OFF_par, offs_sub, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Subtraction of polynomial from range and azimuth offset estimates\n    | Copyright 2017, Gamma Remote Sensing, v1.0 27-Mar-2017 cm\n    \n    Parameters\n    ----------\n    offs:\n        (input) range and azimuth offset estimates (fcomplex)\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    offs_sub:\n        (output) range and azimuth offset estimates after polynomial subtraction (fcomplex)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_sub', offs, OFF_par, offs_sub]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_tracking(offs, ccp, SLC_par, OFF_par, disp_map, disp_val='-', mode='-', thres='-', poly_flag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Conversion of range and azimuth offsets files to displacement map\n    | Copyright 2017, Gamma Remote Sensing, v2.0 4-Apr-2017 ts/clw/uw\n    \n    Parameters\n    ----------\n    offs:\n        (input) range and azimuth offset estimates (fcomplex)\n    ccp:\n        (input) cross-correlation of the offset estimates (float)\n    SLC_par:\n        (input) SLC parameter file of reference SLC\n    OFF_par:\n        (input) offset parameter file used in the offset tracking\n    disp_map:\n        (output) range and azimuth displacement estimates (fcomplex)\n    disp_val:\n        (output) range and azimuth displacement estimates and cross-correlation values (enter - for none) (text)\n    mode:\n        flag indicating displacement mode:\n            * 0: displacement in range and azimuth pixels\n   
         * 1: displacement in meters in slant range and azimuth directions\n            * 2: displacement in meters in ground range and azimuth directions (default)\n    \n    thres:\n        cross-correlation threshold to accept offset value (default from OFF_par)\n    poly_flag:\n        flag indicating if trend calculated using offset polynomials from OFF_par is subtracted:\n            * 0: do not subtract polynomial trend from offset data\n            * 1: subtract polynomial trend from offset data (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/offset_tracking', offs, ccp, SLC_par, OFF_par, disp_map, disp_val, mode, thres, poly_flag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ORB_filt(SLC_par_in, SLC_par_out, interval='-', extra='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Filter state vectors using a least-squares polynomial model\n    | Copyright 2020, Gamma Remote Sensing, v1.3 20-May-2020 clw/cm\n    \n    Parameters\n    ----------\n    SLC_par_in:\n        (input) ISP image parameter file at least 5 state vectors\n    SLC_par_out:\n        (output) ISP image parameter file with state vectors filtered using least-squares\n    interval:\n        time interval between state vectors (enter - for default: state vector time interval in SLC_par)\n    extra:\n        extra time for state vectors at start and end of image (sec.) 
(enter - for default: 5.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ORB_filt', SLC_par_in, SLC_par_out, interval, extra]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ORB_prop_SLC(SLC_par, nstate='-', interval='-', extra='-', mode='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate state vectors using orbit propagation and interpolation\n    | Copyright 2022, Gamma Remote Sensing, v2.0 1-Feb-2022 clw/awi/cm\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) ISP image parameter file with at least 1 state vector\n    nstate:\n        number of state vectors to calculate (enter - for default: nstate from image duration + extra)\n    interval:\n        time interval between state vectors (enter - for default: state vector time interval in SLC_par)\n    extra:\n        extra time for state vectors at start and end of image (sec.) 
(enter - for default: 30.0)\n    mode:\n        orbit propagation mode:\n            * 0: polynomial interpolation (default, if 3 or more state vectors available)\n            * 1: integration of the equations of motion (default, if less than 3 state vectors available)\n            * 2: interpolate between state vectors, minimum of 3 state vectors;\n              interpolation of the equations of motion outside of the time span of the existing state vectors\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ORB_prop_SLC', SLC_par, nstate, interval, extra, mode]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ORRM_vec(SLC_par, ORRM, nstate='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate state vectors extraction from ORRM file\n    | Copyright 2023, Gamma Remote Sensing, v1.5 19-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input/output) ISP SLC/MLI image parameter file\n    ORRM:\n        (input) ORRM state vector file\n    nstate:\n        number of state vectors (enter - for default: 5, maximum: 1024)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ORRM_vec', SLC_par, ORRM, nstate]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ACS_ERS(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file generation for ERS SLC data from the ACS processor\n    | Copyright 2020, Gamma Remote Sensing, 
v1.4 3-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) ERS CEOS SAR leader file\n    SLC_par:\n        (output) ISP SLC parameter file (example <orbit>.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ACS_ERS', CEOS_SAR_leader, SLC_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ASAR(ASAR_ERS_file, output_name, K_dB='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract SLC/MLI image parameters and images from ENVISAT ASAR SLC, WSS, APP, and PRI products\n    | Copyright 2023, Gamma Remote Sensing, v2.9 20-Oct-2023 clw/uw/awi/cm\n    \n    Parameters\n    ----------\n    ASAR_ERS_file:\n        (input) ASAR or ERS data in ASAR format (SAR_IMS_1P) including header and image as provided by ESA\n    output_name:\n        (output) common part of output file names (e.g. 
YYYMMDD date)\n    K_dB:\n        Calibration factor in dB (nominal value for all ASAR modes: 55.0)\n            * NOTE: Use - to use the calibration factor provided in the ASAR file header\n            * NOTE: In the case that a calibration factor is specified on the command line, PRI images are converted\n              to radiometrically calibrated ground-range intensity images in float format\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ASAR', ASAR_ERS_file, output_name, K_dB]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ASF_91(CEOS_leader, CEOS_trailer, SLC_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | SLC parameter file for data data from theAlaska SAR Facility (1991-1996)\n    | Copyright 2020, Gamma Remote Sensing, v3.4 3-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) ASF CEOS leader file\n    CEOS_trailer:\n        (input) ASF CEOS trailer file\n    SLC_par:\n        (output) ISP SLC image parameter file\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_91', CEOS_leader, CEOS_trailer, SLC_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ASF_96(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for ASF data 1996-->present v1.1\n    | Copyright 2020, Gamma Remote Sensing, v1.4 3-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    
CEOS_SAR_leader:\n        (input) CEOS SAR leader file\n    SLC_par:\n        (output) ISP SLC parameter file (example <orbit>.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_96', CEOS_SAR_leader, SLC_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ASF_PRI(CEOS_leader, CEOS_data, GRD_par, GRD, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for ASF detected ground range images (L1) Sep 1996 --> present\n    | Copyright 2021, Gamma Remote Sensing, v1.5 14-Jun-2021 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS leader file\n    CEOS_data:\n        (input) CEOS data file binary\n    GRD_par:\n        (output) ISP ground range image parameter file\n    GRD:\n        (output) ISP ground range image (enter - for none, FLOAT intensity)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_PRI', CEOS_leader, CEOS_data, GRD_par, GRD]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ASF_RSAT_SS(CEOS_leader, CEOS_data, GRD_par, GRD, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for ASF Radarsat-1 SCANSAR images\n    | Copyright 2020, Gamma Remote Sensing, v1.1 3-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS leader file (Radarsat-1 SCANSAR)\n    CEOS_data:\n        (input) CEOS data file (Radarsat-1 SCANSAR)\n    
GRD_par:\n        (output) ISP image parameter file (example <orbit>.mli.par)\n    GRD:\n        (output) ISP image (example <orbit>.mli) (enter -  for none, short integer)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_RSAT_SS', CEOS_leader, CEOS_data, GRD_par, GRD]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ASF_SLC(CEOS_leader, SLC_par, CEOS_data='-', SLC='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC image parameter file and reformat data\n    | Copyright 2023, Gamma Remote Sensing, v1.1 18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS SAR leader file\n    SLC_par:\n        (output) ISP SLC parameter file (example <date>.slc.par)\n    CEOS_data:\n        (input) CEOS data file (example: dat_01.001) (enter - for none)\n    SLC:\n        (output) SLC data with file and line headers removed (example: <date>.slc) (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ASF_SLC', CEOS_leader, SLC_par, CEOS_data, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ASNARO2(CEOS_data, CEOS_leader, SLC_par, SLC='-', reramp='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for ASNARO-2 Spotlight, Stripmap and ScanSAR level 1.1 data\n    | Copyright 2023, Gamma Remote Sensing, v1.4 15-Jun-2023 cm/uw\n    \n    Parameters\n    
----------\n    CEOS_data:\n        (input) CEOS format SLC data (IMG-PP-AS2\\\\*)\n    CEOS_leader:\n        (input) CEOS SAR leader file for ASNARO-2 data (LED-AS2\\\\*)\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd_pp.slc.par)\n    SLC:\n        (output) SLC (Spotlight and Stripmap) or SLI (ScanSAR) data file (enter - for none, example: yyyymmdd_pp.slc)\n    reramp:\n        reramp SLC phase flag (enter - for default)\n            * 0: no reramp\n            * 1: reramp SLC phase (default)\n            * NOTE: ASNARO2 geocoded and georeferenced data in GeoTIFF format (level 1.5) can be read using par_ASNARO2_geo program.\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ASNARO2', CEOS_data, CEOS_leader, SLC_par, SLC, reramp]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ATLSCI_ERS(CEOS_SAR_leader, CEOS_Image, SLC_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for ATL-SCI ERS SLC data\n    | Copyright 2020, Gamma Remote Sensing, v2.9 21-Sep-2020 clw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) CEOS SAR leader file (LEA_01.001)\n    CEOS_Image:\n        (input) CEOS image data segment (DAT_01.001)\n    SLC_par:\n        (output) ISP SLC parameter file (example <orbit>.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ATLSCI_ERS', CEOS_SAR_leader, CEOS_Image, SLC_par]\n    process(cmd, logpath=logpath, 
outdir=outdir, shellscript=shellscript)\n\n\ndef par_Capella_SLC(GeoTIFF, ext_JSON, SLC_par, SLC='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for Capella SLC data\n    | Copyright 2025, Gamma Remote Sensing, v2.0 28-Apr-2025 cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) Capella image data file in GeoTIFF format (\\\\*.tif)\n    ext_JSON:\n        (input) Capella extended metadata file in JSON format (\\\\*_extended.json)\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC data file (enter - for none, example: yyyymmdd.slc)\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: beta0 (default)\n            * 1: sigma0\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels (default)\n            * 1: use noise levels\n            * NOTE: Capella terrain geocoded data in GeoTIFF format can be read using par_Capella_geo program\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_Capella_SLC', GeoTIFF, ext_JSON, SLC_par, SLC, radcal, noise]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_CS_DGM(HDF5, trunk, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP MLI parameter and image files for COSMO-Skymed DGM data\n    | Copyright 2024, Gamma Remote Sensing, v1.1 12-Sep-2024 cm/awi/ms/cw/uw\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) COSMO-Skymed DGM data file in HDF5 format\n    trunk:\n        (output) output file name trunk used for output filenames \n            
(example: yyyymmdd -> yyyymmdd_pol.mli yyyymmdd_pol.mli.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_CS_DGM', HDF5, trunk]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_CS_SLC(HDF5, trunk, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP SLC parameter and image files for Cosmo-Skymed SCS data\n    | Copyright 2024, Gamma Remote Sensing, v2.2 12-Sep-2024 awi/ms/cw/uw\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) SCS data file in HDF5 format\n    trunk:\n        (output) output file name trunk used for output filenames \n            (example: yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_CS_SLC', HDF5, trunk]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_CS_SLC_TIF(GeoTIFF, XML, trunk, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP SLC parameter and image files for Cosmo Skymed SCS data in GeoTIFF format\n    | Copyright 2023, Gamma Remote Sensing, v1.6 16-May-2023 awi/ms/clw/cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) SCS data file in GeoTIFF format\n    XML:\n        (input) SCS meta data file in XML format\n    trunk:\n        (output) output file name trunk used for output filenames \n            (example: yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)\n    logpath: str or None\n   
     a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_CS_SLC_TIF', GeoTIFF, XML, trunk]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_CSG_DGM(HDF5, trunk, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP MLI parameter and image files for COSMO-Skymed Second Generation DGM data\n    | Copyright 2024, Gamma Remote Sensing, v1.1 12-Sep-2024 cm/awi/ms/cw/uw\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) COSMO-Skymed Second Generation DGM data file in HDF5 format\n    trunk:\n        (output) output file name trunk used for output filenames \n            (example: yyyymmdd -> yyyymmdd_pol.mli yyyymmdd_pol.mli.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_CSG_DGM', HDF5, trunk]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_CSG_SLC(HDF5, trunk, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP SLC parameter and image files for COSMO-Skymed Second Generation SCS data\n    | Copyright 2024, Gamma Remote Sensing, v1.3 12-Sep-2024 cm/awi/ms/cw/uw\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) SCS data file in HDF5 format\n    trunk:\n        (output) output file name trunk used for output filenames \n            (example: yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the 
command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_CSG_SLC', HDF5, trunk]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_EORC_JERS_SLC(CEOS_SAR_leader, SLC_par, CEOS_data='-', SLC='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Reformat EORC processed JERS-1 SLC and generate the ISP parameter file\n    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) CEOS SAR leader file for JERS SLC processed by EORC\n    SLC_par:\n        (output) ISP image parameter file\n    CEOS_data:\n        (input) CEOS format SLC data (IMOP_01.DAT, enter - for none)\n    SLC:\n        (output) reformated JERS SLC (example: yyyymmdd.SLC, enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_EORC_JERS_SLC', CEOS_SAR_leader, SLC_par, CEOS_data, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_EORC_PALSAR(CEOS_leader, SLC_par, CEOS_data, SLC='-', dtype='-', sc_dB='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC image and parameter files for PALSAR, PALSAR-2, and PALSAR-3 level 1.1 SLC data produced by EORC/JAXA and ESA\n    | Copyright 2025, Gamma Remote Sensing, v3.9 12-Jun-2025 clw/cm\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS leader file for PALSAR, PALSAR-2, or PALSAR-3 Level 1.1 SLC data (LED...)\n    SLC_par:\n        (output) ISP image parameter file (example: yyyymmdd.slc.par)\n    CEOS_data:\n        (input) PALSAR CEOS format Level 1.1 SLC 
(IMG...)\n    SLC:\n        (output) reformatted PALSAR SLC (example: yyyymmdd.slc, enter - for none)\n    dtype:\n        output data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n    \n    sc_dB:\n        scale factor for FCOMPLEX -> SCOMPLEX, (enter - for default: HH,VV (dB): 60.0000, VH,HV: 70.0000)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_EORC_PALSAR', CEOS_leader, SLC_par, CEOS_data, SLC, dtype, sc_dB]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_EORC_PALSAR_ScanSAR(CEOS_data, CEOS_leader, SLC_par, SLC='-', TOPS_par='-', afmrate='-', shift='-', reramp='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files from EORC PALSAR2 ScanSAR burst SLC data in CEOS format\n    | Copyright 2023, Gamma Remote Sensing, v1.4 18-Apr-2023 cm/clw\n    \n    Parameters\n    ----------\n    CEOS_data:\n        (input) CEOS image file for a PALSAR2 ScanSAR burst data subswath (IMG...)\n    CEOS_leader:\n        (input) CEOS leader file for PALSAR2 ScanSAR burst data (LED...)\n    SLC_par:\n        (output) ISP image parameter file (example: yyyymmdd_b1_hh.slc.par)\n    SLC:\n        (output) SLC data file (enter - for none, example: yyyymmdd_b1_hh.slc)\n    TOPS_par:\n        (output) SLC burst annotation file (enter - for none, example: yyyymmdd_b1_hh.slc.tops_par)\n    afmrate:\n        azimuth FM rate estimation method (enter - for default)\n            * 0: beam velocity on the ground\n            * 1: platform velocity (default)\n    \n    shift:\n        shift azimuth spectrum by fs/2 (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    
reramp:\n        reramp data using Doppler centroid and azimuth FM rate estimate (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_EORC_PALSAR_ScanSAR', CEOS_data, CEOS_leader, SLC_par, SLC, TOPS_par, afmrate, shift, reramp]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ERSDAC_PALSAR(ERSDAC_SLC_par, SLC_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate the ISP image parameter file from ERSDAC PALSAR level 1.1 SLC data\n    | Copyright 2023, Gamma Remote Sensing, v1.7 5-Jun-2023 clw\n    \n    Parameters\n    ----------\n    ERSDAC_SLC_par:\n        (input) ERSDAC SLC parameter file Level 1.1 (PASL11\\\\*.SLC.par)\n    SLC_par:\n        (output) ISP image parameter file (example: yyyymmdd.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ERSDAC_PALSAR', ERSDAC_SLC_par, SLC_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ESA_ERS(CEOS_SAR_leader, SLC_par, inlist, CEOS_DAT='-', SLC='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file generation for ERS SLC data from the PGS, VMP, and SPF processors\n    | Copyright 2020, Gamma Remote Sensing, v1.5 21-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) ERS CEOS SAR leader file\n    SLC_par:\n        (output) ISP SLC parameter file (example: 
<date>.slc.par)\n    inlist:\n        a list of arguments to be passed to stdin\n    CEOS_DAT:\n        (input) CEOS data file (example: DAT_01.001)\n    SLC:\n        (output) SLC data with file and line headers removed (example: <date>.slc)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ESA_ERS', CEOS_SAR_leader, SLC_par, CEOS_DAT, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, inlist=inlist, shellscript=shellscript)\n\n\ndef par_ESA_JERS_SEASAT_SLC(CEOS_data, CEOS_leader, SLC_par, SLC='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for ESA-provided JERS and SEASAT SLC data\n    | Copyright 2023, Gamma Remote Sensing, v1.4 15-Jun-2023 cm/clw/ts\n    \n    Parameters\n    ----------\n    CEOS_data:\n        (input) CEOS format SLC data (DAT_01.001)\n    CEOS_leader:\n        (input) CEOS SAR leader file for JERS SLC processed by ESA (LEA_01.001)\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC data file (enter - for none, example: yyyymmdd.slc)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ESA_JERS_SEASAT_SLC', CEOS_data, CEOS_leader, SLC_par, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ESA_PALSAR_GDH(CEOS_data, CEOS_leader, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI and GRD image and parameter 
files for PALSAR + PALSAR2 level 1.5 GDH data provided by ESA\n    | Copyright 2023, Gamma Remote Sensing, v1.4 5-Jun-2023 clw/cm\n    \n    Parameters\n    ----------\n    CEOS_data:\n        (input) CEOS image file for PALSAR or PALSAR-2 Level 1.5 GDH data (IMG...)\n    CEOS_leader:\n        (input) CEOS leader file for PALSAR or PALSAR-2 Level 1.5 GDH data (LED...)\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)\n    GRD_par:\n        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)\n    GRD:\n        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ESA_PALSAR_GDH', CEOS_data, CEOS_leader, MLI_par, MLI, GRD_par, GRD, rps]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_Fucheng_SLC(GeoTIFF, annotation_XML, calibration_XML, noise_XML, SLC_par, SLC='-', dtype='-', radcal='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for Spacety Fucheng SLC data\n    | Copyright 2024, Gamma Remote Sensing, v1.1 7-Jun-2024 cm/clw/awi\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) image data file in \\\\*.tiff GeoTIFF format (enter - for default: none)\n    annotation_XML:\n        (input) Fucheng XML annotation file\n    calibration_XML:\n        (input) Fucheng radiometric calibration XML file to generate output as sigma0\n            (enter - for default: return 
uncalibrated digital numbers)\n    noise_XML:\n        (input) Fucheng noise XML file (enter - for default: no subtraction of thermal noise power)\n    SLC_par:\n        (output) SLC parameter file (e.g.: yyyymmdd_vv.slc.par)\n    SLC:\n        (output) SLC data file (enter - for default: none, e.g.: yyyymmdd_vv.slc)\n    dtype:\n        output data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n    \n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none\n            * 1: Beta Nought\n            * 2: Sigma Nought (default)\n            * 3: Gamma\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_Fucheng_SLC', GeoTIFF, annotation_XML, calibration_XML, noise_XML, SLC_par, SLC, dtype, radcal]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_GF3_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter file and SLC image from a Gaofen-3 data set in GeoTIFF format\n    | Copyright 2023, Gamma Remote Sensing, v1.3 14-Jun-2023 cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) Gaofen-3 data file in GeoTIFF format (\\\\*.tiff) (enter - for none)\n    annotation_XML:\n        (input) Gaofen-3 annotation file in XML format (\\\\*.meta.xml)\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) ISP SLC data file (example: yyyymmdd.slc) (enter - for none, SLC output will not be produced)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    
shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_GF3_SLC', GeoTIFF, annotation_XML, SLC_par, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_HISEA1_SLC(GeoTIFF, annotation_XML, calibration_XML, SLC_par, SLC='-', dtype='-', sc_dB='-', shift='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for Hisea-1 SLC data\n    | Copyright 2023, Gamma Remote Sensing, v1.4 11-May-2023 awi/cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) image data file in GeoTIFF format (enter - for none, \\\\*.tiff)\n    annotation_XML:\n        (input) Hisea-1 L1 XML annotation file\n    calibration_XML:\n        (input) Hisea-1 L1 radiometric calibration XML file (enter - for no radiometric calibration)\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd_vv.slc.par)\n    SLC:\n        (output) SLC data file (enter - for none, example: yyyymmdd_vv.slc)\n    dtype:\n        output data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n    \n    sc_dB:\n        scale factor for FCOMPLEX -> SCOMPLEX, (enter - for default: HH,VV (dB): 60.0000,  VH,HV: 70.0000)\n    shift:\n        shift azimuth spectrum by fs/2 (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_HISEA1_SLC', GeoTIFF, annotation_XML, calibration_XML, SLC_par, SLC, dtype, sc_dB, shift]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_HT1_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', 
dtype='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for HT1 / Hongtu-1 / PIESAT-1 SLC data\n    | Copyright 2024, Gamma Remote Sensing, v1.0 5-Jun-2024 cm/clw/awi\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) image data file in \\\\*.tiff GeoTIFF format (enter - for default: none)\n    annotation_XML:\n        (input) HT1 XML annotation file\n    SLC_par:\n        (output) SLC parameter file (e.g.: yyyymmdd_vv.slc.par)\n    SLC:\n        (output) SLC data file (enter - for default: none, e.g.: yyyymmdd_vv.slc)\n    dtype:\n        output data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_HT1_SLC', GeoTIFF, annotation_XML, SLC_par, SLC, dtype]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ICEYE_GRD(GeoTIFF, XML, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI and GRD image and parameter files for ICEYE GRD data\n    | Copyright 2024, Gamma Remote Sensing, v1.4 13-Jun-2024 cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) ICEYE GRD data file in GeoTIFF format (enter - for none, \\\\*.tif)\n    XML:\n        (input) ICEYE XML annotation file\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd.mli.par)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd.mli, enter - for none)\n    GRD_par:\n        (output) GRD parameter file (example: yyyymmdd.grd.par, enter - for none)\n    GRD:\n        (output) GRD data file (example: yyyymmdd.grd, enter - 
for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ICEYE_GRD', GeoTIFF, XML, MLI_par, MLI, GRD_par, GRD, rps]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_ICEYE_SLC(HDF5, SLC_par, SLC='-', dtype='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP SLC parameter and binary files for ICEYE SLC data\n    | Copyright 2024, Gamma Remote Sensing, v1.9 28-Oct-2024 cm\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) ICEYE SLC data file in HDF5 format\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC data file (enter - for none, example: yyyymmdd.slc)\n    dtype:\n        output data type (enter - for default: same as input)\n            * 0: FCOMPLEX\n            * 1: SCOMPLEX\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_ICEYE_SLC', HDF5, SLC_par, SLC, dtype]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_IECAS_SLC(aux_data, slc_Re, slc_Im, date, SLC_par, SLC, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for IECAS SLC data\n    | Copyright 2023, Gamma Remote Sensing, v1.3 18-Apr-2023\n    \n    Parameters\n    ----------\n    aux_data:\n        (input) IECAS SAR auxillary data (POS\\\\*.dat)\n   
 slc_Re:\n        (input) real part of complex SLC data\n    slc_Im:\n        (input) imaginary part of complex SLC data\n    date:\n        (input) acquistion date format: YYYYMMDD (example 20110121) from aux_data filename\n    SLC_par:\n        (output) ISP SLC parameter file\n    SLC:\n        (output) SLC image\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_IECAS_SLC', aux_data, slc_Re, slc_Im, date, SLC_par, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_KC_PALSAR_slr(facter_m, CEOS_leader, SLC_par, pol, pls_mode, KC_data, pwr='-', fdtab='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP parameter file, Doppler table, and images for PALSAR KC Slant-Range data\n    | Copyright 2023, Gamma Remote Sensing, v2.3 5-Jun-2023 ms/awi/clw/cm\n    \n    Parameters\n    ----------\n    facter_m:\n        (input) PALSAR Kyoto-Carbon parameter file\n    CEOS_leader:\n        (input) PALSAR Kyoto-Carbon leader file (LED)\n    SLC_par:\n        (output) ISP image parameter file (example: yyyymmdd_pp.mli.par)\n    pol:\n        polarization e.g. 
HH or HV\n    pls_mode:\n        PALSAR acquisition mode:\n            * 1: Fine Beam Single\n            * 2: Fine Beam Double\n            * 3: Wide Beam\n    \n    KC_data:\n        (input) PALSAR Kyoto-Carbon data (named sar_Q\\\\*.dat_\\\\*)\n    pwr:\n        (output) PALSAR Kyoto-Carbon data strip expressed as SAR intensity (enter - for none, example: yyyymmdd_pp.mli)\n    fdtab:\n        (output) table of output polynomials, one polynomial/block used as input to gc_map_fd (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_KC_PALSAR_slr', facter_m, CEOS_leader, SLC_par, pol, pls_mode, KC_data, pwr, fdtab]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_KS_DGM(HDF5, trunk, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP SLC parameter and PRI image files for Kompsat DGM data\n    | Copyright 2023, Gamma Remote Sensing, v1.4 13-Jul-2023 awi/cm\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) DGM data file in HDF5 format\n    trunk:\n        (output) output file name trunk used for output filenames \n            (example: yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.pri.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_KS_DGM', HDF5, trunk]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_KS_SLC(HDF5, trunk, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP SLC parameter and 
image files for Kompsat SCS data\n    | Copyright 2023, Gamma Remote Sensing, v1.7 13-Jul-2023 awi/clw/cm\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) SCS data file in HDF5 format\n    trunk:\n        (output) output file name trunk used for output filenames \n            (example: yyyymmdd -> yyyymmdd_pol_beamid.slc yyyymmdd_pol_beamid.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_KS_SLC', HDF5, trunk]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_LT1_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', dtype='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter file and SLC image from a LT-1 data set\n    | Copyright 2024, Gamma Remote Sensing, v1.3 17-Jul-2024 awi/cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) image data file in GeoTIFF format (enter - for none, \\\\*.tiff)\n    annotation_XML:\n        (input) LT-1 product annotation XML file (\\\\*.meta.xml)\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC data file, example: yyyymmdd.slc (enter - for none, SLC output will not be produced)\n    dtype:\n        output data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_LT1_SLC', GeoTIFF, annotation_XML, SLC_par, SLC, dtype]\n    process(cmd, logpath=logpath, 
outdir=outdir, shellscript=shellscript)\n\n\ndef par_MSP(SAR_par, PROC_par, SLC_par, image_format='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP image parameter file from MSP processing parameter and sensor files\n    | Copyright 2024, Gamma Remote Sensing, v3.7 8-May-2024 clw/uw/of\n    \n    Parameters\n    ----------\n    SAR_par:\n        (input) MSP SAR sensor parameter file\n    PROC_par:\n        (input) MSP processing parameter file\n    SLC_par:\n        (output) ISP SLC/MLI image parameter file\n    image_format:\n        image format flag (enter - for default: from MSP processing parameter file)\n            * 0: FCOMPLEX (pairs of 4-byte float)\n            * 1: SCOMPLEX (pairs of 2-byte short integer)\n            * 2: FLOAT (4-bytes/value)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_MSP', SAR_par, PROC_par, SLC_par, image_format]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_NISAR_RSLC(HDF5, root_name, radcal='-', noise='-', band='-', freq='-', pol='-', out_flag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ISP SLC parameter and image files for NISAR Level-1 RSLC data\n    | Copyright 2025, Gamma Remote Sensing, v1.4 19-May-2025 cm\n    \n    Parameters\n    ----------\n    HDF5:\n        (input) NISAR RSLC data file in HDF5 format (Level-1 Range Doppler Single Look Complex)\n    root_name:\n        (output) root name of the generated output files (example: yyyymmdd)\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none\n            * 1: beta0\n            * 2: sigma0 (default)\n            * 3: gamma0\n    \n    noise:\n        noise subtraction 
using noise equivalent backscatter look-up table (enter - for default)\n            * 0: do not apply noise subtraction (default)\n            * 1: apply noise subtraction\n    \n    band:\n        radar band L or S (enter - for default: all available radar bands)\n    freq:\n        frequencies A or B in case of split imaging bands (enter - for default: all available frequencies)\n    pol:\n        polarization HH, HV, RH, RV, VH, or VV (enter - for default: all available polarizations)\n    out_flag:\n        output flag (enter - for default)\n            * 0: write data and parameter files (default)\n            * 1: only write parameter files\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_NISAR_RSLC', HDF5, root_name, radcal, noise, band, freq, pol, out_flag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_NovaSAR_GRD(GeoTIFF, XML, polarization, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI and GRD image and parameter files for NovaSAR GRD and SCD data\n    | Copyright 2023, Gamma Remote Sensing, v1.8 3-Mar-2023 cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) NovaSAR image data file in GeoTIFF format (enter - for none, \\\\*.tif)\n    XML:\n        (input) NovaSAR XML annotation file\n    polarization:\n        image polarization: HH, VV, HV, VH, CH, CV\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)\n    GRD_par:\n        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for 
none)\n    GRD:\n        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: beta0 (default)\n            * 1: sigma0\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels (default)\n            * 1: use noise levels\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_NovaSAR_GRD', GeoTIFF, XML, polarization, MLI_par, MLI, GRD_par, GRD, rps, radcal, noise]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_NovaSAR_SLC(GeoTIFF, XML, polarization, SLC_par, SLC='-', dtype='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for NovaSAR SLC data\n    | Copyright 2023, Gamma Remote Sensing, v1.6 3-Mar-2023 cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) NovaSAR image data file in GeoTIFF format (enter - for none, \\\\*.tif)\n    XML:\n        (input) NovaSAR XML annotation file\n    polarization:\n        image polarization: HH, VV, HV, VH, CH, CV\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd_pp.slc.par)\n    SLC:\n        (output) SLC data file (enter - for none, example: yyyymmdd_pp.slc)\n    dtype:\n        output data type (enter - for default: same as input)\n            * 0: FCOMPLEX\n            * 1: SCOMPLEX\n    \n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: beta0 (default)\n            * 1: sigma0\n    \n    noise:\n        noise 
levels flag (enter - for default)\n            * 0: do not use noise levels (default)\n            * 1: use noise levels\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_NovaSAR_SLC', GeoTIFF, XML, polarization, SLC_par, SLC, dtype, radcal, noise]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_NovaSAR_SRD(GeoTIFF, XML, polarization, MLI_par, MLI='-', radcal='-', noise='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI image and parameter files for NovaSAR SRD data\n    | Copyright 2023, Gamma Remote Sensing, v1.3 3-Mar-2023 cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) NovaSAR image data file in GeoTIFF format (enter - for none, \\\\*.tif)\n    XML:\n        (input) NovaSAR XML annotation file\n    polarization:\n        image polarization: HH, VV, HV, VH, CH, CV\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: beta0 (default)\n            * 1: sigma0\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels (default)\n            * 1: use noise levels\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_NovaSAR_SRD', GeoTIFF, XML, polarization, MLI_par, MLI, radcal, noise]\n    
process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_PRI(CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file generation for ERS PRI data from the PGS and VMP processors\n    | Copyright 2020, Gamma Remote Sensing, v1.7 21-Sep-2020 clw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) ERS CEOS SAR leader file for PRI product\n    PRI_par:\n        (output) ISP image parameter file (example: <yyyymmdd>.pri.par)\n    CEOS_DAT:\n        (input) CEOS data file (example: DAT_01.001)\n    PRI:\n        (output) PRI data with file and line headers removed (example: <yyyymmdd>.pri)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_PRI', CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_PRI_ESRIN_JERS(CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP GRD parameter file for ESRIN processed JERS PRI data\n    | Copyright 2020, Gamma Remote Sensing, v1.9 21-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) ERS CEOS SAR leader file for PRI product\n    PRI_par:\n        (output) ISP image parameter file (example: <yyyymmdd>.pri.par)\n    CEOS_DAT:\n        (input) CEOS data file (example: DAT_01.001)\n    PRI:\n        (output) PRI data with file and line headers removed (example: <yyyymmdd>.pri)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands 
to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_PRI_ESRIN_JERS', CEOS_SAR_leader, PRI_par, CEOS_DAT, PRI]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_PulSAR(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file generation for ERS SLC data from the PULSAR SAR processor\n    | Copyright 2020, Gamma Remote Sensing, v1.3 21-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) ERS CEOS SAR leader file\n    SLC_par:\n        (output) ISP SLC parameter file (example <orbit>.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_PulSAR', CEOS_SAR_leader, SLC_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RCM_GRC(RCM_dir, polarization, radcal, noise, SLC_par='-', SLC='-', GRC_par='-', GRC='-', rps='-', noise_pwr='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate slant and ground range complex images and parameter files from a Radarsat Constellation GRC (Ground Range georeferenced Complex) product\n    | Copyright 2024, Gamma Remote Sensing, v3.0 21-Oct-2024 cm\n    \n    Parameters\n    ----------\n    RCM_dir:\n        (input) Radarsat Constellation main directory path (e.g.: RCM3_OK1001322_PK1001415_1_5M4_20160417_004803_VV_GRC)\n    polarization:\n        image polarization: HH, VV, HV, VH, CH, CV\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none (default)\n            * 1: Beta Nought\n            * 2: Sigma Nought\n            * 3: Gamma\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do 
not use noise levels file (default)\n            * 1: use noise levels file\n            * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)\n    \n    SLC_par:\n        (output) SLC parameter file (example: yyyymmdd_pp.slc.par, enter - for none)\n    SLC:\n        (output) SLC data file in slant range geometry (example: yyyymmdd_pp.slc, enter - for none)\n    GRC_par:\n        (output) GRC parameter file (example: yyyymmdd_pp.grc.par, enter - for none)\n    GRC:\n        (output) GRC data file (example: yyyymmdd_pp.grc, enter - for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    noise_pwr:\n        (output) noise intensity for each SLC sample in slant range using data from noise levels file (enter - for none)\n            * NOTE: when the noise_pwr file is specified, noise power correction will NOT be applied to the GRC / SLC data values\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_GRC', RCM_dir, polarization, radcal, noise, SLC_par, SLC, GRC_par, GRC, rps, noise_pwr]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RCM_GRD(RCM_dir, polarization, radcal, noise, MLI_par='-', MLI='-', GRD_par='-', GRD='-', rps='-', noise_pwr='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI and GRD images and parameter files from a Radarsat Constellation GRD (Ground Range georeferenced Detected) product\n    | Copyright 2024, Gamma Remote Sensing, v2.9 21-Oct-2024 cm\n    \n    Parameters\n    ----------\n    RCM_dir:\n        (input) Radarsat Constellation main directory path (e.g.: 
RCM1_OK1001327_PK1001418_1_3M28_20160417_013625_HH_GRD)\n    polarization:\n        image polarization: HH, VV, HV, VH, CH, CV\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none (default)\n            * 1: Beta Nought\n            * 2: Sigma Nought\n            * 3: Gamma\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels file (default)\n            * 1: use noise levels file\n            * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)\n    \n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd_pp.mli.par, enter - for none)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)\n    GRD_par:\n        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)\n    GRD:\n        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    noise_pwr:\n        (output) noise intensity for each MLI sample in slant range using data from noise levels file (enter - for none)\n            * NOTE: when the noise_pwr file is specified, noise power correction will NOT be applied to the GRD / MLI data values\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_GRD', RCM_dir, polarization, radcal, noise, MLI_par, MLI, GRD_par, GRD, rps, noise_pwr]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RCM_MLC(RCM_dir, radcal, noise, root_name, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate 
parameter and image files for Radarsat Constellation MLC (Multi-Look Complex) data from GeoTIFF or NITF format\n    | Copyright 2024, Gamma Remote Sensing, v1.4 21-Oct-2024 cm\n    \n    Parameters\n    ----------\n    RCM_dir:\n        (input) Radarsat Constellation main directory path (e.g.: RCM2_OK1782060_PK1782073_2_SC30MCPC_20200504_105537_CH_CV_MLC)\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none (default)\n            * 1: Beta Nought\n            * 2: Sigma Nought\n            * 3: Gamma\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels file (default)\n            * 1: use noise levels file\n            * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)\n    \n    root_name:\n        (output) root name of the generated output files (example: yyyymmdd)\n            * NOTE: the program will automatically complete the root_name and add extensions for each covariance matrix element\n              for both data and parameter files, such as 20210927_CH.mlc, 20210927_CH.mlc.par, 20210927_XC.mlc, etc.\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_MLC', RCM_dir, radcal, noise, root_name]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RCM_SLC(RCM_dir, polarization, radcal, noise, SLC_par, SLC, noise_pwr='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for Radarsat Constellation SLC data from GeoTIFF or NITF file\n    | Copyright 2024, Gamma Remote Sensing, v2.7 21-Oct-2024 cm\n    \n    Parameters\n    ----------\n    RCM_dir:\n        (input) Radarsat 
Constellation main directory path (e.g.: RCM2_OK1002260_PK1002436_3_SC50MB_20160417_002427_VH_VV_SLC)\n    polarization:\n        image polarization: HH, VV, HV, VH, CH, CV\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none (default)\n            * 1: Beta Nought\n            * 2: Sigma Nought\n            * 3: Gamma\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels file (default)\n            * 1: use noise levels file\n            * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)\n    \n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd_pp.slc.par)\n    SLC:\n        (output) SLC data file (example: yyyymmdd_pp.slc)\n    noise_pwr:\n        (output) noise intensity for each SLC sample in slant range using data from noise levels file (enter - for none)\n            * NOTE: when the noise_pwr file is specified, noise power correction will NOT be applied to the SLC data values\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_SLC', RCM_dir, polarization, radcal, noise, SLC_par, SLC, noise_pwr]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RCM_SLC_ScanSAR(RCM_dir, polarization, radcal, noise_in, root_name, SLC_tab='-', beam='-', noise_out='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files from Radarsat Constellation ScanSAR SLC data in GeoTIFF or NITF format\n    | Copyright 2024, Gamma Remote Sensing, v3.3 21-Oct-2024 cm\n    \n    Parameters\n    ----------\n    RCM_dir:\n        (input) Radarsat Constellation main directory path 
(e.g.: RCM2_OK1002260_PK1002436_3_SC50MB_20160417_002427_VH_VV_SLC)\n    polarization:\n        image polarization: HH, VV, HV, VH, CH, CV\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none (default)\n            * 1: Beta Nought\n            * 2: Sigma Nought\n            * 3: Gamma\n    \n    noise_in:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels file (default)\n            * 1: use noise levels file\n            * NOTE: noise levels file can only be used for radiometrically calibrated data (radcal flag: 1, 2, or 3)\n    \n    root_name:\n        (output) root name of the generated output files (example: yyyymmdd_pp)\n            * NOTE: the program will automatically complete the root_name with beam numbers and extensions for the SLC, SLC_par, and TOPS_par files\n    \n    SLC_tab:\n        (output) 3 column list of SLC, SLC_par, and TOPS_par files, with the beams sorted from near to far range (example: yyyymmdd_pp.SLC_tab)\n    beam:\n        number specifying the desired ScanSAR beam number (enter - for default: extract all beams)\n            * NOTE: enter 0 to get the list of the available beams\n    \n    noise_out:\n        output noise intensity for each SLC sample in slant range flag (enter - for default)\n            * 0: do not write noise intensity files (default)\n            * 1: write noise intensity files (file name(s) automatically defined)\n            * NOTE: when noise intensity files are written, noise power correction will NOT be applied to the SLC data values\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RCM_SLC_ScanSAR', RCM_dir, polarization, radcal, noise_in, root_name, SLC_tab, beam, 
noise_out]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RISAT_GRD(CEOS_leader, BAND_META, GRD_par, CEOS_image, GRD='-', line_dir='-', pix_dir='-', cal_flg='-', KdB='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Read RISAT-1 Ground-Range data from a CEOS data set and perform radiometric calibration\n    | Copyright 2015, Gamma Remote Sensing, v1.2 24-Feb-2015 clw\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS SAR leader file (example: lea_01.001)\n    BAND_META:\n        (input) BAND_META.txt, additional RISAT system parameters for the scene (format keywork=value)\n    GRD_par:\n        (output) ISP GRD parameter file (example: YYYYMMDD.grd.par)\n    CEOS_image:\n        (input) CEOS Ground-Range image file (example: dat_01.001)\n    GRD:\n        (output) Ground-Range data with file and line headers removed (enter - for none: example: YYYYMMDD.grd)\n    line_dir:\n        set output image line direction (enter - for default):\n            * 0: used value derived from CEOS leader file\n            * 1: retain input data line direction  (default)\n            * -1: reverse input data line direction\n    \n    pix_dir:\n        set output pixel direction (enter - for default):\n            * 0: used value derived from CEOS leader file\n            * 1: retain input data pixel direction (default)\n            * -1: reverse input data pixel direction\n    \n    cal_flg:\n        calibration flag (enter - for default):\n            * 0: do not apply radiometric calibration\n            * 1: apply radiometric calibration including KdB and incidence angle correction (default)\n    \n    KdB:\n        calibration constant (dB) (enter - to use value in the CEOS leader)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma 
commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RISAT_GRD', CEOS_leader, BAND_META, GRD_par, CEOS_image, GRD, line_dir, pix_dir, cal_flg, KdB]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RISAT_SLC(CEOS_leader, BAND_META, SLC_par, CEOS_image, SLC='-', line_dir='-', pix_dir='-', cal_flg='-', KdB='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Read RISAT-1 CEOS format SLC data and perform radiometric calibration\n    | Copyright 2013, Gamma Remote Sensing, v1.1 3-Jun-2013 clw\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS SAR leader file (example: lea_01.001)\n    BAND_META:\n        (input) BAND_META.txt, additional RISAT system parameters for the scene (format keywork=value)\n    SLC_par:\n        (output) ISP SLC image parameter file (example: YYYYMMDD.grd.par)\n    CEOS_image:\n        (input) CEOS SLC image file (example: dat_01.001)\n    SLC:\n        (output) SLC data with file and line headers removed (enter - for none: example: YYYYMMDD.grd)\n    line_dir:\n        set output image line direction (enter - for default):\n            * 0: used value derived from CEOS leader file\n            * 1: retain input data line direction  (default)\n            * -1: reverse input data line direction\n    \n    pix_dir:\n        set output pixel direction (enter - for default):\n            * 0: used value derived from CEOS leader file\n            * 1: retain input data pixel direction (default)\n            * -1: reverse input data pixel direction\n    \n    cal_flg:\n        calibration flag (enter - for default):\n            * 0: do not apply radiometric calibration\n            * 1: apply radiometric calibration including KdB and incidence angle correction (default)\n    \n    KdB:\n        calibration constant (dB) (enter - to use value in the CEOS leader)\n    logpath: str or None\n        a directory to write command logfiles 
to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RISAT_SLC', CEOS_leader, BAND_META, SLC_par, CEOS_image, SLC, line_dir, pix_dir, cal_flg, KdB]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RSAT2_SG(product_XML, lut_XML, GeoTIFF, polarization, MLI_par='-', MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI and GRD images and parameter files from Radarsat 2 SGF/SGX/SCF data\n    | Copyright 2023, Gamma Remote Sensing, v2.2 7-Jun-2023 awi/cw/cm\n    \n    Parameters\n    ----------\n    product_XML:\n        (input) Radarsat-2 product annotation XML file (product.xml)\n    lut_XML:\n        (input) Radarsat-2 calibration XML file (lutSigma.xml), use - for no calibration\n    GeoTIFF:\n        (input) image data file in GeoTIFF format (imagery_PP.tif)\n    polarization:\n        (input) image polarization: HH, VV, HV, VH\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd_pp.mli.par, enter - for none)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)\n    GRD_par:\n        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)\n    GRD:\n        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n            * NOTE: Ground range geometry is less accurate than slant range geometry and should be avoided\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    
\"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT2_SG', product_XML, lut_XML, GeoTIFF, polarization, MLI_par, MLI, GRD_par, GRD, rps]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RSAT2_SLC(product_XML, lut_XML, GeoTIFF, polarization, SLC_par, SLC, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for Radarsat 2 SLC data from GeoTIFF\n    | Copyright 2023, Gamma Remote Sensing, v2.9 7-Jun-2023 awi/clw/cm\n    \n    Parameters\n    ----------\n    product_XML:\n        (input) Radarsat-2 product annotation XML file (product.xml)\n    lut_XML:\n        (input) Radarsat-2 calibration XML file (lutSigma.xml), use - for no calibration\n    GeoTIFF:\n        (input) image data file in GeoTIFF format (imagery_PP.tif)\n    polarization:\n        (input) image polarization: HH, VV, HV, VH\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd_pp.slc.par)\n    SLC:\n        (output) SLC data file (example: yyyymmdd_pp.slc)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT2_SLC', product_XML, lut_XML, GeoTIFF, polarization, SLC_par, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RSAT_SCW(CEOS_leader, CEOS_trailer, CEOS_data, GRD_par, GRD, sc_dB='-', dt='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for SCANSAR Wide Swath Data\n    | Copyright 2020, Gamma Remote Sensing, v2.2 3-Sep-2020 clw/cm\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS SAR leader file\n    CEOS_trailer:\n        (input) CEOS SAR trailer file\n    CEOS_data:\n        (input) CEOS data file binary)\n    GRD_par:\n 
       (output) ISP ground range image parameter file (example <orbit>.mli.par)\n    GRD:\n        (output) ISP ground range image (example <orbit>.mli) (enter -  for none, float)\n    sc_dB:\n        intensity scale factor in dB (enter - for default:   0.00)\n    dt:\n        azimuth image time offset (s) (enter - for default = 0.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT_SCW', CEOS_leader, CEOS_trailer, CEOS_data, GRD_par, GRD, sc_dB, dt]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RSAT_SGF(CEOS_leader, CEOS_data, GRD_par, GRD, sc_dB='-', dt='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for RSI/Atlantis Radarsat SGF (ground range) and SCANSAR SCW16 data\n    | Copyright 2020, Gamma Remote Sensing, v2.4 3-Sep-2020 clw/cm\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS leader file (RSI SGF or SCW16 products, LEA_01.001)\n    CEOS_data:\n        (input) CEOS data file (RSI SGF or SCW16 products, DAT_01.001) \n    GRD_par:\n        (output) ISP ground range image parameter file (example <orbit>.mli.par)\n    GRD:\n        (output) ISP ground range image (example <orbit>.grd.par) (enter -  for none, float)\n    sc_dB:\n        intensity scale factor in dB (enter - for default:   0.00)\n    dt:\n        azimuth image time offset (s) (enter - for default = 0.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT_SGF', CEOS_leader, 
CEOS_data, GRD_par, GRD, sc_dB, dt]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RSAT_SLC(CEOS_leader, SLC_par, CEOS_data, SLC='-', sc_dB='-', dt='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for RSI/Atlantis/ASF processed Radarsat SLC data\n    | Copyright 2012, Gamma Remote Sensing, v4.0 5-Sep-2012 clw\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS SAR leader file (example: lea_01.001)\n    SLC_par:\n        (output) ISP SLC parameter file (example: <date>.slc.par)\n    CEOS_data:\n        (input) CEOS data file (example: dat_01.001)\n    SLC:\n        (output) SLC data with file and line headers removed (example: <date>.slc)\n    sc_dB:\n        intensity scale factor in dB (enter - for default:  60.00)\n    dt:\n        azimuth image time offset (s) (enter - for default = 0.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RSAT_SLC', CEOS_leader, SLC_par, CEOS_data, SLC, sc_dB, dt]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_RSI_ERS(CEOS_SAR_leader, SLC_par, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP parameter file for RSI processed ERS SLC data\n    | Copyright 2020, Gamma Remote Sensing, v1.8 3-Sep-2020 clw/uw/cm\n    \n    Parameters\n    ----------\n    CEOS_SAR_leader:\n        (input) ERS CEOS SAR leader file\n    SLC_par:\n        (output) ISP SLC parameter file (example <orbit>.slc.par)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell 
format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_RSI_ERS', CEOS_SAR_leader, SLC_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_S1_GRD(GeoTIFF, annotation_XML, calibration_XML, noise_XML, MLI_par, MLI, GRD_par='-', GRD='-', eflg='-', rps='-', noise_pwr='-', edge_flag='-', loff='-', nl='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI and GRD images and parameter files from a Sentinel-1 GRD product\n    | Copyright 2023, Gamma Remote Sensing, v4.8 27-Apr-2023 awi/clw/ts/cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) image data file in GeoTIFF format (enter - for none, \\\\*.tiff)\n    annotation_XML:\n        (input) Sentinel-1 L1 XML annotation file\n    calibration_XML:\n        (input) Sentinel-1 L1 radiometric calibration XML file (enter - for no radiometric calibration)\n    noise_XML:\n        (input) Sentinel-1 L1 noise XML file (enter - to not subtract thermal noise power level)\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd_pp.mli, enter - for none)\n    GRD_par:\n        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)\n    GRD:\n        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)\n    eflg:\n        GR-SR grid extrapolation flag (enter - for default)\n            * 0: no extrapolation of the GR-SR grid beyond the grid boundaries\n            * 1: permit extrapolation of the GR-SR grid to cover the entire image (default)\n            * NOTE: extrapolation of the GR-SR grid may introduce geocoding errors\n    \n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    noise_pwr:\n        noise intensity for each MLI sample in slant range using data from noise_XML (enter - for none)\n            * NOTE: when the 
noise_pwr file is specified, noise power correction will NOT be applied to the MLI data values\n    \n    edge_flag:\n        edge cleaning flag (enter - for default)\n            * 0: do not clean edges (default for Sentinel-1 IPF version >= 2.90)\n            * 1: basic method\n            * 2: elaborate method based on Canny edge detection (default for Sentinel-1 IPF version < 2.90)\n            * 3: force basic method when Sentinel-1 IPF version >= 2.90\n            * 4: force elaborate method based on Canny edge detection when Sentinel-1 IPF version >= 2.90\n            * NOTE: options 1 and 2 are changed to 0 when Sentinel-1 IPF version >= 2.90\n    \n    loff:\n        offset to starting line of the input segment (enter - for default: 0)\n    nl:\n        number of lines to read from the file beginning at loff (enter - for default: to end of file)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_S1_GRD', GeoTIFF, annotation_XML, calibration_XML, noise_XML, MLI_par, MLI, GRD_par, GRD, eflg, rps, noise_pwr, edge_flag, loff, nl]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_S1_SLC(GeoTIFF, annotation_XML, calibration_XML, noise_XML, SLC_par, SLC, TOPS_par='-', dtype='-', sc_dB='-', noise_pwr='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for Sentinel-1 SLC data\n    | Copyright 2025, Gamma Remote Sensing, v5.7 14-Apr-2025 awi/clw/cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) image data file in \\\\*.tiff GeoTIFF format (enter - for default: none)\n    annotation_XML:\n        (input) Sentinel-1 L1 XML annotation file\n    calibration_XML:\n        (input) Sentinel-1 L1 radiometric 
calibration XML file to generate output as sigma0\n            (enter - for default: return uncalibrated digital numbers)\n    noise_XML:\n        (input) Sentinel-1 L1 noise XML file (enter - for default: no subtraction of thermal noise power)\n    SLC_par:\n        (output) ISP SLC parameter file. Example: yyyymmdd_iw1_vv.slc.par\n    SLC:\n        (output) SLC data file (enter - for default: none). Example: yyyymmdd_iw1_vv.slc\n    TOPS_par:\n        (output) SLC burst annotation file; for TOPS and EW SLC data only (enter - for default: none). Example: yyyymmdd_iw1_vv.slc.tops_par\n    dtype:\n        output data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n    \n    sc_dB:\n        scale factor for FCOMPLEX -> SCOMPLEX, (enter - for default: HH,VV (dB): 60.0000,  VH,HV: 70.0000)\n    noise_pwr:\n        noise intensity for each SLC sample in slant range using data from noise_XML (enter - for none)\n            * NOTE: when the noise_pwr file is specified, noise power will NOT be subtracted from the image data values\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_S1_SLC', GeoTIFF, annotation_XML, calibration_XML, noise_XML, SLC_par, SLC, TOPS_par, dtype, sc_dB, noise_pwr]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_SAOCOM_GRD(data, XML, MLI_par, MLI='-', GRD_par='-', GRD='-', rps='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate MLI parameter and image files for SAOCOM L1B Ground Range Detected Images\n    | Copyright 2025, Gamma Remote Sensing, v1.0 13-Jan-2025 cm\n    \n    Parameters\n    ----------\n    data:\n        (input) SAOCOM image data file in binary format (enter - for 
none, e.g. di--acqId0000729082-a-tw--2411281122-hh-m)\n    XML:\n        (input) SAOCOM XML annotation file (e.g. di--acqId0000729082-a-tw--2411281122-hh-m.xml)\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd_pp.mli.par)\n    MLI:\n        (output) MLI data file (FCOMPLEX, enter - for none, example: yyyymmdd_pp.mli)\n    GRD_par:\n        (output) GRD parameter file (example: yyyymmdd_pp.grd.par, enter - for none)\n    GRD:\n        (output) GRD data file (example: yyyymmdd_pp.grd, enter - for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_SAOCOM_GRD', data, XML, MLI_par, MLI, GRD_par, GRD, rps]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_SAOCOM_SLC(data, XML, SLC_par, SLC='-', TOPS_par='-', RSLC_par='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for SAOCOM stripmap and TOPS SLC data\n    | Copyright 2023, Gamma Remote Sensing, v1.6 21-Mar-2023 cm\n    \n    Parameters\n    ----------\n    data:\n        (input) SAOCOM image data file in binary format (enter - for none, e.g. slc-acqId0000089010-a-tna-0000000000-s3qp-hh)\n    XML:\n        (input) SAOCOM XML annotation file (e.g. 
slc-acqId0000089010-a-tna-0000000000-s3qp-hh.xml)\n    SLC_par:\n        (output) SLC parameter file (example: yyyymmdd_s3_pp.slc.par)\n    SLC:\n        (output) SLC data file (FCOMPLEX, enter - for none, example: yyyymmdd_s3_pp.slc)\n    TOPS_par:\n        (output) SLC burst annotation file, TOPS data only (enter - for none, example: yyyymmdd_s3_vv.slc.tops_par)\n    RSLC_par:\n        (input) reference SLC parameter file to keep consistent range pixel spacing (example: yyyymmdd_s1_pp.slc.par)\n            * NOTE: SAOCOM geocoded data in GeoTIFF format (GEC and GTC / level 1C and 1D data) can be read using par_SAOCOM_geo program\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_SAOCOM_SLC', data, XML, SLC_par, SLC, TOPS_par, RSLC_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_SICD_SLC(NITF, radcal, noise, SLC_par, SLC='-', XML='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for SICD SLC data\n    | Copyright 2025, Gamma Remote Sensing, v2.0 28-Apr-2025 cm\n    \n    Parameters\n    ----------\n    NITF:\n        (input) Sensor Independent Complex Data (SICD) file in NITF 2.1 container file (e.g.: CAPELLA_C03_SM_SICD_HH_20210512034455_20210512034459.ntf)\n    radcal:\n        radiometric calibration flag (enter - for default)\n            * 0: none\n            * 1: beta0 (default)\n            * 2: sigma0\n            * 3: gamma0\n            * 4: RCS (target radar cross section in m^2)\n    \n    noise:\n        noise levels flag (enter - for default)\n            * 0: do not use noise levels (default)\n            * 1: use noise levels\n    \n    SLC_par:\n        (output) ISP SLC parameter file 
(example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC data file (enter - for none, example: yyyymmdd.slc)\n    XML:\n        (output) XML metadata file (enter - for none, example: CAPELLA_C03_SM_SICD_HH_20210512034455_20210512034459.xml)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_SICD_SLC', NITF, radcal, noise, SLC_par, SLC, XML]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_SIRC(CEOS_leader, SLC_par, UTC_MET='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP SLC parameter file from SIR-C CEOS leader file\n    | Copyright 2025, Gamma Remote Sensing, v2.7 28-May-2025 clw/uw\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) JPL SIR-C CEOS leader file\n    SLC_par:\n        (output) ISP SLC parameter file\n    UTC_MET:\n        time reference for state vectors: MET (Mission Elapsed Time) or UTC (enter - for default: UTC)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_SIRC', CEOS_leader, SLC_par, UTC_MET]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_STRIX(CEOS_leader, SLC_par, CEOS_data, SLC='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for Synspective StriX SLC data\n    | Copyright 2023, Gamma Remote Sensing, v1.5 9-May-2023 awi/cm\n    \n    Parameters\n    ----------\n    CEOS_leader:\n        (input) CEOS leader file for STRIX-alpha SLC data (LED-STRIXA...)\n  
  SLC_par:\n        (output) ISP image parameter file (example: yyyymmdd.slc.par)\n    CEOS_data:\n        (input) STRIX-alpha CEOS format SLC (IMG-pp-STRIXA...)\n    SLC:\n        (output) reformatted STRIX SLC (example: yyyymmdd.slc, enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_STRIX', CEOS_leader, SLC_par, CEOS_data, SLC]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_SV2_SLC(GeoTIFF, annotation_XML, SLC_par, SLC='-', dtype='-', radcal='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter file and SLC image from a SuperView Neo-2 / SuperView-2 / Gaojing-2 data set\n    | Copyright 2025, Gamma Remote Sensing, v1.3 12-May-2025 awi/cm\n    \n    Parameters\n    ----------\n    GeoTIFF:\n        (input) image data file in GeoTIFF format (enter - for none, \\\\*.tiff)\n    annotation_XML:\n        (input) SV-2 product annotation XML file (\\\\*.meta.xml)\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC data file, example: yyyymmdd.slc (enter - for none, SLC output will not be produced)\n    dtype:\n        output data type (enter - for default)\n            * 0: FCOMPLEX (default)\n            * 1: SCOMPLEX\n    \n    radcal:\n        output radiometric calibration flag (enter - for default)\n            * 0: beta0\n            * 1: sigma0 (default)\n            * 2: gamma0\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = 
['GAMMA_SOFTWARE-20250625/ISP/bin/par_SV2_SLC', GeoTIFF, annotation_XML, SLC_par, SLC, dtype, radcal]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_SWOT_SLC(NETCDF, trunk, DEM='-', DEM_par='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter and image files for SWOT level 1B KaRIn SLC data\n    | Copyright 2024, Gamma Remote Sensing, v1.2 30-Oct-2024 cm\n    \n    Parameters\n    ----------\n    NETCDF:\n        (input) SWOT level 1B KaRIn SLC data file in NETCDF format (``SWOT_L1B_..._PIC0_01.nc``)\n    trunk:\n        (output) file name trunk used for output filenames\n            (example: yyyymmdd -> yyyymmdd_L_minus_y.slc yyyymmdd_L_minus_y.slc.par)\n    DEM:\n        (output) DEM file in SCH coordinates (enter - for none)\n    DEM_par:\n        (output) DEM parameter file in SCH coordinates (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_SWOT_SLC', NETCDF, trunk, DEM, DEM_par]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_TX_GRD(annotation_XML, GeoTIFF, GRD_par, GRD='-', pol='-', MLI_par='-', MLI='-', rps='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate ground range image and image parameter file for Terrasar-X MGD data in GeoTIFF format\n    | Copyright 2023, Gamma Remote Sensing, v1.5 8-May-2023 awi/clw/cm\n    \n    Parameters\n    ----------\n    annotation_XML:\n        (input) Terrasar-X product annotation XML file\n    GeoTIFF:\n        (input) image data file in GeoTIFF format\n            * NOTE: make sure the data set contains the selected polarization\n    \n    GRD_par:\n        (output) ISP ground range image parameter file 
(example: yyyymmdd.grd.par, enter - for none)\n    GRD:\n        (output) calibrated ground range data file (example: yyyymmdd.grd, enter - for none)\n    pol:\n        polarization: HH, HV, VH, VV (enter - for default: first polarization found in the annotation_XML)\n    MLI_par:\n        (output) MLI parameter file (example: yyyymmdd.mli.par, enter - for none)\n    MLI:\n        (output) MLI data file in slant range geometry (example: yyyymmdd.mli, enter - for none)\n    rps:\n        slant range pixel spacing (m) (enter - for default: calculated from ground-range parameters)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_TX_GRD', annotation_XML, GeoTIFF, GRD_par, GRD, pol, MLI_par, MLI, rps]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_TX_ScanSAR(annotation_XML, swath, SLC_par, SLC, TOPS_par, bwflg='-', dtype='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC, SLC_par and TOPS_par from a Terrasar-X ScanSAR data set\n    | Copyright 2023, Gamma Remote Sensing, v2.4 18-Apr-2023 clw/cm/awi\n    \n    Parameters\n    ----------\n    annotation_XML:\n        (input) TerraSAR-X ScanSAR product annotation XML file including path\n            * NOTE: The path to the image products is determined from the path to the XML annotation\n    \n    swath:\n        number specifying the desired ScanSAR swath (1 -> maximum number of swaths (4 or 6))\n            * NOTE: The image product name is specified in the XML file\n    \n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC ScanSAR data file, example: yyyymmdd.slc\n            (enter - for none, SLC output will not be produced)\n    
TOPS_par:\n        (output) SLC ScanSAR burst annotation file (example: yyyymmdd_s1.slc.tops_par\n    bwflg:\n        burst window flag (enter - for default)\n            * 0: use first and last annotation line values specified in the annotation_XML\n            * 1: extend first and last valid line to include all data lines (default)\n    \n    dtype:\n        output data type (enter - for default)\n            * 0: same as input (default)\n            * 1: FCOMPLEX\n            * NOTE: While TSX ScanSAR data are not acquired in TOPS mode, the same data structure can be used for burst annotation\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_TX_ScanSAR', annotation_XML, swath, SLC_par, SLC, TOPS_par, bwflg, dtype]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_TX_SLC(annotation_XML, COSAR, SLC_par, SLC, pol='-', dtype='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate SLC parameter file and SLC image from a Terrasar-X SSC data set\n    | Copyright 2023, Gamma Remote Sensing, v2.5 6-Mar-2023 awi/clw/cm\n    \n    Parameters\n    ----------\n    annotation_XML:\n        (input) TerraSAR-X product annotation XML file\n    COSAR:\n        (input) COSAR SSC stripmap or spotlight mode SLC data file\n    SLC_par:\n        (output) ISP SLC parameter file (example: yyyymmdd.slc.par)\n    SLC:\n        (output) SLC data file, example: yyyymmdd.slc (enter - for none, SLC output will not be produced)\n    pol:\n        polarization HH, HV, VH, VV (enter - for default: first polarization found in the annotation_XML)\n    dtype:\n        output data type (enter - for default)\n            * 0: same as input (default)\n            * 1: FCOMPLEX\n    \n  
  logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/par_TX_SLC', annotation_XML, COSAR, SLC_par, SLC, pol, dtype]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef par_UAVSAR_SLC(ann, SLC_MLC_in, SLC_MLI_par, SLC_MLI_out='-', image_type='-', image_format='-', DOP='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP image parameter file from UAVSAR annotation file (ann) for SLC and MLC products\n    | Copyright 2025, Gamma Remote Sensing, v2.0 31-Mar-2025 clw/cm\n    \n    Parameters\n    ----------\n    ann:\n        (input) UAVSAR annotation file (\\\\*ann.txt or \\\\*.ann)\n    SLC_MLC_in:\n        (input) UAVSAR binary data file (required for annotation file version 1.2) (enter - for none)\n    SLC_MLI_par:\n        (output) ISP image parameter file\n    SLC_MLI_out:\n        (output) SLC data file (enter - for none)\n    image_type:\n        image type flag (enter - for default)\n            * 0: SLC (slc) in slant range coordinates (default)\n            * 1: MLC (mlc) in slant range coordinates\n              HHHH\\\\*, VVVV\\\\*, HVHV\\\\* are FLOAT format\n              HHHV\\\\*, HHVV\\\\*, HVVV\\\\* are FCOMPLEX format\n    image_format:\n        image data format flag (enter - for default)\n            * 0: FCOMPLEX (pairs of 4-byte float (re,im)) (default)\n            * 2: FLOAT  (4-bytes/value)\n    \n    DOP:\n        (input) UAVSAR Doppler look-up table (if not zero-Doppler) (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = 
['GAMMA_SOFTWARE-20250625/ISP/bin/par_UAVSAR_SLC', ann, SLC_MLC_in, SLC_MLI_par, SLC_MLI_out, image_type, image_format, DOP]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ph_slope_base(int_in, SLC_par, OFF_par, base, int_out, int_type='-', inverse='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Subtract/add interferogram flat-Earth phase trend as estimated from initial baseline\n    | Copyright 2023, Gamma Remote Sensing, v4.5 19-Apr-2023 clw\n    \n    Parameters\n    ----------\n    int_in:\n        (input) interferogram (FCOMPLEX) or unwrapped phase (FLOAT) (unflattened)\n    SLC_par:\n        (input) ISP parameter file for the reference SLC\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    base:\n        (input) baseline file\n    int_out:\n        (output) interferogram (FCOMPLEX) or unwrapped phase (FLOAT) with phase trend subtracted/added\n    int_type:\n        interferogram type (enter - for default)\n            * 0: unwrapped phase\n            * 1: complex interferogram (default)\n    \n    inverse:\n        subtract/add inversion flag (enter - for default)\n            * 0: subtract phase ramp (default)\n            * 1: add phase ramp\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ph_slope_base', int_in, SLC_par, OFF_par, base, int_out, int_type, inverse]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef phase_slope(interf, slopes, width, win_sz='-', thres='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate interferogram phase slopes in range and azimuth\n    | Copyright 2023, Gamma Remote Sensing, v1.4 
18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    interf:\n        (input) interferogram (fcomplex)\n    slopes:\n        (output) range and azimuth phase slopes (fcomplex)\n    width:\n        number of samples/row\n    win_sz:\n        size of region used for slopes determination (enter - for default: 5)\n    thres:\n        correlation threshold for accepting slope estimates 0.0 -> 1.0 (enter - for default: .4)\n    xmin:\n        starting range pixel offset (enter - for default: 0)\n    xmax:\n        last range pixel offset (enter - for default: width-1)\n    ymin:\n        starting azimuth row offset (enter - for default: 0)\n    ymax:\n        last azimuth row offset (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/phase_slope', interf, slopes, width, win_sz, thres, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef PRC_vec(SLC_par, PRC, nstate='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | State vectors from ERS PRC orbit data for ISP processing clw/uw\n    | Copyright 2023, Gamma Remote Sensing, v1.9 11-Oct-2023 clw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input/output) ISP SLC/MLI image parameter file\n    PRC:\n        (input) PRC state vector file\n    nstate:\n        number of state vectors (enter - for default: 5, maximum: 1024)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/PRC_vec', SLC_par, PRC, nstate]\n    
process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ptarg_cal_MLI(MLI_par, MLI, r_samp, az_samp, psigma, c_r_samp, c_az_samp, ptr_image, r_plot, az_plot, pcal, osf='-', win='-', pltflg='-', psz='-', csz='-', theta_inc='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Point target analysis and radiometric calibration of slant-range and ground-range (GRD) images\n    | Copyright 2016, Gamma Remote Sensing, v2.6 19-Feb-2016 clw\n    \n    Parameters\n    ----------\n    MLI_par:\n        (input) slant-range or ground-range image parameter file for detected intensity data\n    MLI:\n        (input) ground-range or slant range detected image in FLOAT format\n    r_samp:\n        point target range sample number, target region size is 16x16\n    az_samp:\n        point target azimuth line number, target region size is 16x16\n    psigma:\n        radar cross-section of the calibration target in m\\\\*\\\\*2\n    c_r_samp:\n        clutter region center range sample number, clutter region size is 16x16\n    c_az_samp:\n        clutter region center azimuth line number, clutter region size is 16x16\n    ptr_image:\n        (output) oversampled point target image, with and without phase gradient, nominal width: 256\n    r_plot:\n        (output) range point target response plot data (text format)\n    az_plot:\n        (output) azimuth point target response plot data (text format)\n    pcal:\n        (output) measured point target parameters and radiometric calibration factor (text format)\n    osf:\n        image over-sampling factor, 2, 4, 8, 16, 32, 64 (enter - for default: 16)\n    win:\n        maximum search window offset (samples) (enter - for default: 1)\n    pltflg:\n        plotting mode flag:\n            * 0: none\n            * 1: output plots in PNG format (default)\n            * 2: screen output\n            * 3: output plots in PDF format\n    \n    psz:\n        point target region size (samples) (enter - for 
default: 16)\n    csz:\n        clutter region size (samples) (enter - for default: 16)\n    theta_inc:\n        incidence angle required for calibration of terrain corrrected RISAT-1 images\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ptarg_cal_MLI', MLI_par, MLI, r_samp, az_samp, psigma, c_r_samp, c_az_samp, ptr_image, r_plot, az_plot, pcal, osf, win, pltflg, psz, csz, theta_inc]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ptarg_cal_SLC(SLC_par, SLC, r_samp, az_samp, psigma, c_r_samp, c_az_samp, ptr_image, r_plot, az_plot, pcal, osf='-', win='-', pltflg='-', psz='-', csz='-', c_image='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Point target analysis and radiometric calibration of SLC images\n    | Copyright 2016, Gamma Remote Sensing, v2.4 19-Feb-2016 clw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) SLC image parameter file\n    SLC:\n        (input) SLC image in FCOMPLEX or SCOMPLEX format\n    r_samp:\n        point target range sample number, target region size is 16x16\n    az_samp:\n        point target azimuth line number, target region size is 16x16\n    psigma:\n        radar cross-section of the calibration target in m\\\\*\\\\*2\n    c_r_samp:\n        clutter region center range sample number, clutter region size is 16x16\n    c_az_samp:\n        clutter region center azimuth line number, clutter region size is 16x16\n    ptr_image:\n        (output) oversampled point target image, with and without phase gradient, nominal width: 256\n    r_plot:\n        (output) range point target response plot data (text format)\n    az_plot:\n        (output) azimuth point target response plot data (text format)\n    pcal:\n     
   (output) measured point target parameters and radiometric calibration factor (text format)\n    osf:\n        image over-sampling factor, 2, 4, 8, 16, 32, 64 (enter - for default: 16)\n    win:\n        maximum search window offset (samples) (enter - for default: 1)\n    pltflg:\n        plotting mode flag:\n            * 0: none\n            * 1: output plots in PNG format (default)\n            * 2: screen output\n            * 3: output plots in PDF format\n    \n    psz:\n        point target region size (samples) (enter - for default: 16)\n    csz:\n        clutter region size (samples) (enter - for default: 16)\n    c_image:\n        (output) clutter region image (FCOMPLEX format)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ptarg_cal_SLC', SLC_par, SLC, r_samp, az_samp, psigma, c_r_samp, c_az_samp, ptr_image, r_plot, az_plot, pcal, osf, win, pltflg, psz, csz, c_image]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ptarg_SLC(SLC_par, SLC, r_samp, az_samp, ptr_image, r_plot, az_plot, ptr_par='-', osf='-', win='-', pltflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Point target response analysis and interpolation for SLC images\n    | Copyright 2024, Gamma Remote Sensing, v2.0 4-Oct-2024 clw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) SLC image parameter file\n    SLC:\n        (input) SLC image in FCOMPLEX or SCOMPLEX format\n    r_samp:\n        point target range sample number\n    az_samp:\n        point target azimuth line number\n    ptr_image:\n        (output) oversampled point target image (fcomplex, 1024x1024 samples), with and without phase gradient\n    r_plot:\n        (output) range point target response 
plot data (text format)\n    az_plot:\n        (output) azimuth point target response plot data (text format)\n    ptr_par:\n        (output) measured point target parameters (text format)\n    osf:\n        image over-sampling factor, 2, 4, 8, 16, 32, 64 (enter - for default: 16)\n    win:\n        maximum search window offset (samples) (enter - for default: 1)\n    pltflg:\n        plotting mode flag:\n            * 0: none\n            * 1: output plots in PNG format (default)\n            * 2: screen output\n            * 3: output plots in PDF format\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ptarg_SLC', SLC_par, SLC, r_samp, az_samp, ptr_image, r_plot, az_plot, ptr_par, osf, win, pltflg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef radcal_MLI(MLI, MLI_par, OFF_par, CMLI, antenna='-', rloss_flag='-', ant_flag='-', refarea_flag='-', sc_dB='-', K_dB='-', pix_area='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Radiometric calibration for multi-look intensity (MLI) data\n    | Copyright 2023, Gamma Remote Sensing, v2.4 6-Jul-2023 uw/clw/of\n    \n    Parameters\n    ----------\n    MLI:\n        (input) MLI image (FLOAT)\n    MLI_par:\n        (input) SLC parameter file of input MLI image\n    OFF_par:\n        (input) ISP offset/interferogram parameter file (enter - for images in MLI geometry)\n    CMLI:\n        (output) radiometrically calibrated output MLI (FLOAT)\n    antenna:\n        (input) 1-way antenna gain pattern file (enter - for none)\n    rloss_flag:\n        range spreading loss correction (enter - for default)\n            * 0: no correction (default)\n            * 1: apply r^3 correction  (all modes except ASAR APS)\n          
  * 2: apply r^4 correction (used only for ASAR APS mode)\n            * -1: undo r^3 correction\n            * -2: undo r^4 correction\n    \n    ant_flag:\n        antenna pattern correction (enter - for default)\n            * 0: no correction (default)\n            * 1: apply antenna pattern correction\n            * -1: undo antenna pattern correction\n    \n    refarea_flag:\n        reference pixel area correction (enter - for default)\n            * 0: no pixel area correction (default)\n            * 1: calculate sigma0, scale area by sin(inc_ang)/sin(ref_inc_ang)\n            * 2: calculate gamma0, scale area by sin(inc_ang)/(cos(inc_ang)\\\\*sin(ref_inc_ang)\n            * -1: undo sigma0 area scaling factor\n            * -2: undo gamma0 area scaling factor\n    \n    sc_dB:\n        scale factor in dB (enter - for default: 0.0)\n    K_dB:\n        calibration factor in dB (enter - for default: value from MLI_par)\n    pix_area:\n        (output) ellipsoid-based ground range sigma0 or gamma0 pixel reference area (FLOAT) (enter - for none)\n            refarea_flag 1 or -1: sigma0 ref. area\n            refarea_flag 2 or -2: gamma0 ref. 
area\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/radcal_MLI', MLI, MLI_par, OFF_par, CMLI, antenna, rloss_flag, ant_flag, refarea_flag, sc_dB, K_dB, pix_area]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef radcal_PRI(PRI, PRI_par, GRD, GRD_par, K_dB='-', inc_ref='-', roff='-', nr='-', loff='-', nl='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Convert ESA processed short integer format PRI to radiometrically calibrated GRD image (float)\n    | Copyright 2023, Gamma Remote Sensing, v1.7 19-Apr-2023 uw/clw\n    \n    Parameters\n    ----------\n    PRI:\n        (input) PRI ground-range image (short integer, sqrt(backscat. intensity)\n    PRI_par:\n        (input) SLC parameter file of input PRI ground-range image (yyyymmdd.pri.par)\n    GRD:\n        (output) calibrated ground-range image (float, backscat. intensity)\n    GRD_par:\n        (output) ISP image parameter file of output calibrated ground-range image (yyyymmdd.grd.par)\n    K_dB:\n        calibration factor in decibels (enter - for default: 59.75 dB)\n            ERS1 (D-Paf,ESRIN): 58.24 dB, ERS2 (D-Paf,ESRIN,I-Paf,UK-Paf after 1997): 59.75 dB\n            ENVISAT ASAR: 55.0 dB (all modes)\n            for details see product specifications and ESA publications.\n    inc_ref:\n        reference incidence angle in deg. (enter - for default: 23.0 deg.)\n            ENVISAT ASAR: 90.0 deg. 
(all modes)\n    roff:\n        offset to starting range sample (enter - for default: 0)\n    nr:\n        number of range samples (enter - for default: to end of line)\n    loff:\n        offset to starting line (enter - for default: 0, 1 header line in the input file is assumed for ERS)\n    nl:\n        number of lines to copy (enter - for default: to end of file)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/radcal_PRI', PRI, PRI_par, GRD, GRD_par, K_dB, inc_ref, roff, nr, loff, nl]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef radcal_pwr_stat(SLC_tab, SLC_tab_cal, plist, MSR_cal, PWR_cal, roff='-', loff='-', nr='-', nl='-', plist_out='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate calibrated SLC image files using point targets determined from the Mean/Sigma Ratio and Intensity\n    | Copyright 2022, Gamma Remote Sensing, v1.5 8-Nov-2022 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) two column list of the SLC filenames and SLC parameter filenames of the uncalibrated SLC images\n    SLC_tab_cal:\n        (input) two column list of the SLC filenames and SLC parameter filenames of the calibrated SLC images (enter - for none)\n    plist:\n        (input) point list for the point to use for calibration (int, enter - to use the data to determine the calibration points)\n    MSR_cal:\n        mean/sigma ratio for point target selection for relative calibration between scenes:    1.500\n    PWR_cal:\n        intensity threshold ratio for point target selection for relative calibration between scenes:    1.000\n    roff:\n        offset to starting range of section to analyze (default -: 0)\n    loff:\n        offset to 
starting line of section to analyze (default -: 0)\n    nr:\n        number of range pixels to analyze (default -: to end of line)\n    nl:\n        number of azimuth lines to analyze (default -: to end of file)\n    plist_out:\n        point list of points used to determine calibration using MSR_cal and PWR_cal thresholds\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/radcal_pwr_stat', SLC_tab, SLC_tab_cal, plist, MSR_cal, PWR_cal, roff, loff, nr, nl, plist_out]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef radcal_SLC(SLC, SLC_par, CSLC, CSLC_par, fcase='-', antenna='-', rloss_flag='-', ant_flag='-', refarea_flag='-', sc_dB='-', K_dB='-', pix_area='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Radiometric calibration of SLC data\n    | Copyright 2023, Gamma Remote Sensing, v2.8 6-Jul-2023 uw/clw/of\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC (FCOMPLEX or SCOMPLEX)\n    SLC_par:\n        (input) SLC parameter file of input SLC\n    CSLC:\n        (output) radiometrically calibrated SLC (FCOMPLEX or SCOMPLEX)\n    CSLC_par:\n        (output) SLC parameter file of output calibrated SLC\n    fcase:\n        format case (enter - for default)\n            * 1: FCOMPLEX --> FCOMPLEX (pairs of FLOAT) (default)\n            * 2: FCOMPLEX --> SCOMPLEX (pairs of SHORT INTEGER)\n            * 3: SCOMPLEX --> FCOMPLEX\n            * 4: SCOMPLEX --> SCOMPLEX\n    \n    antenna:\n        1-way antenna gain pattern file (enter - for none)\n    rloss_flag:\n        range spreading loss correction (enter - for default)\n            * 0: no correction (default)\n            * 1: apply r^3 correction  (all modes except ASAR APS)\n            * 2: 
apply r^4 correction (used only for ASAR APS mode)\n            * -1: undo r^3 correction\n            * -2: undo r^4 correction\n    \n    ant_flag:\n        antenna pattern correction (enter - for default)\n            * 0: no correction (default)\n            * 1: apply antenna pattern correction\n            * -1: undo antenna pattern correction\n    \n    refarea_flag:\n        reference pixel area correction (enter - for default)\n            * 0: no pixel area correction (default)\n            * 1: calculate sigma0, scale area by sin(inc_ang)/sin(ref_inc_ang)\n            * 2: calculate gamma0, scale area by sin(inc_ang)/(cos(inc_ang)\\\\*sin(ref_inc_ang)\n            * -1: undo sigma0 area scaling factor\n            * -2: undo gamma0 area scaling factor\n    \n    sc_dB:\n        scale factor in dB (enter - for default: 0.0)\n    K_dB:\n        calibration factor in dB (enter - for default: value from SLC_par)\n    pix_area:\n        (output) ellipsoid-based ground range sigma0 or gamma0 pixel reference area (FLOAT) (enter - for none)\n            refarea_flag 1 or -1: sigma0 ref. area\n            refarea_flag 2 or -2: gamma0 ref. 
area\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/radcal_SLC', SLC, SLC_par, CSLC, CSLC_par, fcase, antenna, rloss_flag, ant_flag, refarea_flag, sc_dB, K_dB, pix_area]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef rascc_mask(cc, pwr, width, start_cc='-', start_pwr='-', nlines='-', pixavr='-', pixavaz='-', cc_thres='-', pwr_thres='-', cc_min='-', cc_max='-', scale='-', exp='-', LR='-', rasf='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Generate phase unwrapping validity mask using correlation and intensity\n    | Copyright 2023, Gamma Remote Sensing, v2.2 19-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    cc:\n        (input) interferometric correlation image (FLOAT)\n    pwr:\n        (input) intensity image (FLOAT, enter - if not available)\n    width:\n        number of samples/row\n    start_cc:\n        starting line of coherence image (enter - for default: 1)\n    start_pwr:\n        starting line of intensity image (enter - for default: 1)\n    nlines:\n        number of lines to display (enter - or 0 for default: to end of file)\n    pixavr:\n        number of pixels to average in range (enter - for default: 1)\n    pixavaz:\n        number of pixels to average in azimuth (enter - for default: 1)\n    cc_thres:\n        coherence threshold for masking, pixels with cc < cc_thres are set to 0 (enter - for default: 0.0)\n    pwr_thres:\n        relative intensity threshold for masking, pixels with intensity < pwr_thres \\\\* average intensity are set to 0 (enter - for default: 0)\n    cc_min:\n        minimum coherence value used for color display (enter - for default: 0.1)\n    cc_max:\n        maximum coherence value used for color 
display (enter - for default: 0.9)\n    scale:\n        intensity display scale factor (enter - for default: 1.0)\n    exp:\n        intensity display exponent (enter - for default: 0.35)\n    LR:\n        image mirror flag (enter - for default)\n            * 1: normal (default)\n            * -1: mirror image\n    \n    rasf:\n        (output) image filename, extension determines the format, enter - for default: \\\\*.tif\n            \\\\*.bmp BMP format\n            \\\\*.ras Sun raster format\n            \\\\*.tif TIFF format\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/rascc_mask', cc, pwr, width, start_cc, start_pwr, nlines, pixavr, pixavaz, cc_thres, pwr_thres, cc_min, cc_max, scale, exp, LR, rasf]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef rascc_mask_thinning(ras_in, in_file, width, ras_out, nmax='-', thresholds='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Adaptive sampling reduction for phase unwrapping validity mask\n    | Copyright 2023, Gamma Remote Sensing, v1.7 19-Apr-2023 uw/clw\n    \n    Parameters\n    ----------\n    ras_in:\n        (input) validity mask (SUN/BMP/TIFF raster format 8-bit image)\n    in_file:\n        (input) file used for adaptive sampling reduction, e.g. 
correlation coefficient (float)\n    width:\n        number of samples/row of in_file\n    ras_out:\n        (output) validity mask with reduced sampling (8-bit SUN rasterfile or BMP format image)\n    nmax:\n        number of sampling reduction runs (enter - for default: 3)\n    thresholds:\n        a list of thresholds sorted from smallest to largest scale sampling reduction\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/rascc_mask_thinning', ras_in, in_file, width, ras_out, nmax, thresholds]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef res_map(hgt, gr, data, SLC_par, OFF_par, res_hgt, res_data, nr='-', naz='-', azps_res='-', loff='-', nlines='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Slant range to ground range transformation based on interferometric ground-range\n    | Copyright 2023, Gamma Remote Sensing, v2.6 18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    hgt:\n        (input) height file in slant range geometry\n    gr:\n        (input) ground range file in slant range geometry\n    data:\n        (input) data file in slant range geometry (float) (intensity \\\\*.pwr or correlation \\\\*.cc)\n    SLC_par:\n        (input) ISP parameter file of reference SLC\n    OFF_par:\n        (input) offset/interferogram processing parameters\n    res_hgt:\n        (output) resampled height file in ground range geometry\n    res_data:\n        (output) resampled data file in ground range geometry\n    nr:\n        number of range samples for L.S. estimate (enter - for default: 7, must be odd)\n    naz:\n        number of azimuth samples for L.S. 
estimate (enter - for default: 7, must be odd)\n    azps_res:\n        azimuth output map sample spacing in meters (enter - for default: azimuth spacing)\n    loff:\n        offset to starting line for height calculations (enter - for default: 0)\n    nlines:\n        number of lines to calculate (enter - for default: to end of file)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/res_map', hgt, gr, data, SLC_par, OFF_par, res_hgt, res_data, nr, naz, azps_res, loff, nlines]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef residue(int, flag, width, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Determine interferometric phase unwrapping residues\n    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    int:\n        (input) interferogram (fcomplex)\n    flag:\n        (input) flag file (unsigned char)\n    width:\n        number of samples/row\n    xmin:\n        offset to starting range pixel (enter - for default: 0)\n    xmax:\n        offset last range pixel (enter - for default: width-1)\n    ymin:\n        offset to starting azimuth row (enter - for default: 0)\n    ymax:\n        offset to last azimuth row (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/residue', int, flag, width, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef 
residue_cc(int, flag, width, xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Determine interferometric phase unwrapping residues considering low coherence regions\n    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/uw/ts\n    \n    Parameters\n    ----------\n    int:\n        (input) interferogram (fcomplex)\n    flag:\n        (input) flag file (unsigned char)\n    width:\n        number of samples/row\n    xmin:\n        offset to starting range pixel (enter - for default: 0)\n    xmax:\n        offset last range pixel (enter - for default: width-1)\n    ymin:\n        offset to starting azimuth row (enter - for default: 0)\n    ymax:\n        offset to last azimuth row (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/residue_cc', int, flag, width, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef RSAT2_vec(SLC_par, RSAT2_orb, nstate='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract Radarsat-2 state vectors from a definitive orbit file\n    | Copyright 2022, Gamma Remote Sensing, v1.1 clw/cm 7-Nov-2022\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) ISP image parameter file\n    RSAT2_orb:\n        Radarsat-2 definitive orbit data file available from MDA (orbit_number_def.orb)\n    nstate:\n        number of state vectors to extract (enter - for default: 9)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n  
  cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/RSAT2_vec', SLC_par, RSAT2_orb, nstate]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_burstloc(annotation_XML, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Print Burst information found in the Sentinel-1 annotation file\n    | Copyright 2025, Gamma Remote Sensing, v1.4 3-Feb-2025 awi/cm\n    \n    Parameters\n    ----------\n    annotation_XML:\n        (input) Sentinel-1 L1 XML annotation file\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/S1_burstloc', annotation_XML]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_ETAD_SLC(ETAD, SLC1_tab, SLC2_tab, OPOD='-', corr='-', phase='-', tropo='-', iono='-', tides='-', bistatic='-', Doppler='-', FM_rate='-', mode='-', order='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Read and apply Sentinel-1 Extended Timing Annotation Dataset (ETAD) to correct range and azimuth timings of Sentinel-1 SLC images\n    | Copyright 2025, Gamma Remote Sensing, v1.1 25-Jun-2025 cm\n    \n    Parameters\n    ----------\n    ETAD:\n        (input) ETAD directory (e.g. S1A_IW_ETA__AXDV_20240807T172347_20240807T172414_055110_06B719_202E.SAFE)\n            ETAD can be downloaded from https://dataspace.copernicus.eu/\n    SLC1_tab:\n        (input) SLC_tab of Sentinel-1 TOPS or Stripmap SLC (e.g. 20240807.SLC_tab)\n    SLC2_tab:\n        (output) SLC_tab of Sentinel-1 TOPS or Stripmap SLC with ETAD correction (e.g. 
20240807.ETAD.SLC_tab)\n    OPOD:\n        replace state vectors by precision orbit data (OPOD) provided with ETAD data (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    corr:\n        apply following timing corrections (enter - for default)\n            * 0: no correction\n            * 1: all corrections (default)\n            * 2: all corrections in range only\n            * 3: all corrections in azimuth only\n            * 4: select individual corrections (defined in subsequent options)\n    \n    phase:\n        apply phase corrections corresponding to the selected timing corrections in range (enter - for default)\n            * 0: no\n            * 1: yes (default)\n            * 2: yes, experimental mode (phase corrections written to file(s))\n    \n    tropo:\n        apply corrections for tropospheric delay in range (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    iono:\n        apply corrections for ionospheric delay in range (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    tides:\n        apply corrections for solid Earth tides (enter - for default)\n            * 0: no\n            * 1: yes, in range and azimuth (default)\n            * 2: range only\n            * 3: azimuth only\n    \n    bistatic:\n        apply corrections for bistatic azimuth shifts (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    Doppler:\n        apply corrections for Doppler-induced range shifts (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    FM_rate:\n        apply corrections for FM-rate mismatch azimuth shifts (enter - for default)\n            * 0: no\n            * 1: yes (default)\n    \n    mode:\n        complex data interpolation mode (enter - for default)\n            * 0: Lanczos (default)\n            * 1: B-spline\n    \n    order:\n        Lanczos interpolator order / B-spline 
degree 4 -> 9 (enter - for default: 5)\n            NOTES: - SLC1_tab and SLC2_tab or their contents can be the same files (the files will be overwritten in that case)\n            - if SLC2_tab doesn't exist, it will be automatically created with file names derived from SLC1_tab contents\n            - SLC_tab line entries:\n            - TOPS mode:      SLC   SLC_par   TOPS_par\n            - Stripmap mode:  SLC   SLC_par\n            - with [phase] = 1, phase corrections only use ionospheric delays and solid Earth tides in range direction\n            - with [phase] = 2, phase corrections also include compensation for tropospheric path delays\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/S1_ETAD_SLC', ETAD, SLC1_tab, SLC2_tab, OPOD, corr, phase, tropo, iono, tides, bistatic, Doppler, FM_rate, mode, order]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_OPOD_vec(SLC_par, OPOD, nstate='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract Sentinel-1 OPOD state vectors and copy into the ISP image parameter file\n    | Copyright 2025, Gamma Remote Sensing, v1.8 23-Jan-2024 awi/clw/cm\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input/output) ISP SLC/MLI image parameter file\n    OPOD:\n        (input) Sentinel-1 OPOD orbit data file (AUX_POEORB or AUX_RESORB)\n            orbit files can be downloaded from https://s1qc.asf.alaska.edu/ or https://dataspace.copernicus.eu/\n    nstate:\n        number of state vectors to extract (enter - for default: include 60 sec extension at the start and end of the SLC data)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to 
execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/S1_OPOD_vec', SLC_par, OPOD, nstate]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef sbi_filt(SLC1, SLC1_par, SLC2R_par, SLCf, SLCf_par, SLCb, SLCb_par, norm_sq, iwflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Azimuth filtering of SLC data to support split-beam interferometry to measure azimuth offsets\n    | Copyright 2023, Gamma Remote Sensing, v1.6 clw/cm 18-Apr-2023\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) SLC image (SCOMPLEX or FCOMPLEX format)\n    SLC1_par:\n        (input) SLC image parameter file\n    SLC2R_par:\n        (input) SLC2 ISP image parameter file for the co-registered image of the interferometric pair,\n            used to determine azimuth common-band for each output SLC (enter - for none)\n    SLCf:\n        (output) SLC image (forward-looking, FCOMPLEX format)\n    SLCf_par:\n        (output) SLC parameter file (forward-looking)\n    SLCb:\n        (output) SLC image (backward-looking, FCOMPLEX format)\n    SLCb_par:\n        (output) SLC parameter file (backward-looking)\n    norm_sq:\n        squint between beams as a fraction of the azimuth spectrum width (default: 0.5)\n    iwflg:\n        inverse weighting flag (enter - for default)\n            * 0: no compensation for azimuth spectrum weighting\n            * 1: compensate for the azimuth spectrum weighting (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/sbi_filt', SLC1, SLC1_par, SLC2R_par, SLCf, SLCf_par, SLCb, SLCb_par, norm_sq, iwflg]\n    process(cmd, 
logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef sbi_offset(sbi_unw, SLCf_par, SLCb_par, OFF_par, az_offset, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate azimuth offsets from unwrapped split-beam interferogram\n    | Copyright 2022, Gamma Remote Sensing, v1.1 8-Nov-2022\n    \n    Parameters\n    ----------\n    sbi_unw:\n        (input) unwrapped phase of split-beam interferogram (float)\n    SLCf_par:\n        (input) reference SLC parameter file (forward-looking)\n    SLCb_par:\n        (input) reference SLC parameter file (backward-looking)\n    OFF_par:\n        (input) offset parameter file\n    az_offset:\n        (output) azimuth offsets (m)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/sbi_offset', sbi_unw, SLCf_par, SLCb_par, OFF_par, az_offset]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_burst_copy(SLC, SLC_par, TOPS_par, SLC_out, SLC_out_par, burst_num, drflg='-', SLC_par2='-', dtype='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Copy selected burst from Sentinel-1 TOPS SLC to a file\n    | Copyright 2023, Gamma Remote Sensing, v2.1 18-Apr-2023 awi/clw/cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) ScanSAR mode burst SLC\n    SLC_par:\n        (input) SLC parameter file for the ScanSAR burst scene\n    TOPS_par:\n        (input) burst parameter file for the ScanSAR burst SLC\n    SLC_out:\n        (output) SLC file containing a single burst\n    SLC_out_par:\n        (output) SLC parameter file for the single burst\n    burst_num:\n        burst number of selected burst (1 -> number of bursts in the SLC)\n    drflg:\n        deramp phase flag (enter - for default)\n   
         * 0: no modification of the burst SLC phase (default)\n            * 1: subtract TOPS mode Doppler phase ramp for Sentinel-1 (deramp)\n    \n    SLC_par2:\n        (output) SLC parameter file for the single burst SLC with deramped phase (drflg: 1, enter - for none)\n    dtype:\n        output data type (enter - for default: same as input data):\n            * 0: FCOMPLEX\n            * 1: SCOMPLEX\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_copy', SLC, SLC_par, TOPS_par, SLC_out, SLC_out_par, burst_num, drflg, SLC_par2, dtype]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_burst_corners(SLC_par, TOPS_par, KML='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate corner geographic coordinates of ScanSAR burst data and generate a KML with burst rectangles\n    | Copyright 2025, Gamma Remote Sensing, v1.5 10-Feb-2025 awi/rc/cw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) SLC parameter file for the ScanSAR burst data\n    TOPS_par:\n        (input) ScanSAR burst parameter file\n    KML:\n        (output) KML output file\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_corners', SLC_par, TOPS_par, KML]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_burst_MLI(SLC_tab, MLI_tab, rlks, azlks, bflg='-', SLCR_tab='-', MLI_dir='-', scale='-', logpath=None, outdir=None, shellscript=None):\n    
\"\"\"\n    | Generate MLI burst data from ScanSAR burst SLC data (Sentinel-1, RCM, and TSX)\n    | Copyright 2024, Gamma Remote Sensing v2.5 25-Jun-2024 clw/cm\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range\n            SLC_tab line entries:   SLC   SLC_par  TOPS_par\n    MLI_tab:\n        (output) 3 column list of MLI swaths listed in order from near to far range\n            MLI_tab line entries:   MLI   MLI_par  TOPS_par\n            * NOTE: if the MLI_tab does not yet exist, the file entries will be created with names derived from the SLC_tab entries\n    \n    rlks:\n        number of range looks  (1...80)\n    azlks:\n        number of azimuth look (1...20)\n    bflg:\n        burst window calculation flag (enter - for default)\n            * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)\n            * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks\n    \n    SLCR_tab:\n        (input) 3 column list of the reference scene with swaths, listed in order from near to far range (enter - for none)\n            SLCR_tab line entries:   SLC    SLC_par   TOPS_par\n    MLI_dir:\n        directory for output burst MLI data, ignored if the MLI_tab already exists (enter - for default: current directory)\n    scale:\n        scale factor for output MLI (enter - for default: calculate from calibration gain in SLC parameter file)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_MLI', SLC_tab, MLI_tab, rlks, azlks, bflg, SLCR_tab, MLI_dir, scale]\n    process(cmd, logpath=logpath, outdir=outdir, 
shellscript=shellscript)\n\n\ndef ScanSAR_burst_overlap(SLC_tab, root_name, rlks, azlks, mode='-', bflg='-', SLCR_tab='-', dburst='-', bound='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Extract and mosaic overlapping parts of ScanSAR / TOPS burst data\n    | Copyright 2023, Gamma Remote Sensing v1.8 18-Apr-2023 cm/clw/uw\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) 3 column list of SLC, SLC_par, Sentinel-1 TOPS_par sorted in the order IW1, IW2, IW3...\n    root_name:\n        (output) output data root name (example: yyyymmdd_pp_overlap)\n    rlks:\n        number of range looks used to determine burst window boundaries\n    azlks:\n        number of azimuth looks used to determine burst window boundaries\n    mode:\n        output mode (enter - for default)\n            * 0: output data are mosaics, non-overlapping parts are set to 0 (default)\n            * 1: output data are mosaics, non-overlapping parts are written\n            * 2: output data are burst data containing only overlapping parts\n            * 3: output data is a polygon file with polygons encompassing overlapping areas in the SLC mosaic\n            * 4: output data is a polygon file with polygons encompassing overlapping areas in the MLI mosaic\n    \n    bflg:\n        burst window calculation flag (enter - for default)\n            * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)\n            * 1: recalculate burst window parameters from burst parameters and the number of range and azimuth looks\n    \n    SLCR_tab:\n        (input) SLC_tab of the reference scene, 3 column list of SLC, SLC_par, TOPS_par sorted in the order IW1, IW2, IW3 (enter - for none)\n            * NOTE: When generating a mosaic of a resampled SLC, the SLC_tab of the reference scene is required\n    \n    dburst:\n        delta burst number (1=overlap of subsequent bursts, enter - for default: 1)\n    
bound:\n        boundary pixels in polygon (enter - for default: 0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_overlap', SLC_tab, root_name, rlks, azlks, mode, bflg, SLCR_tab, dburst, bound]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_burst_to_mosaic(DATA_tab, mosaic, MLI_par, mflg='-', data_tab_ref='-', min_ovr='-', max_ovr='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate mosaic of multilook ScanSAR burst data (FLOAT or FCOMPLEX)\n    | Copyright 2023, Gamma Remote Sensing v2.5 18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    DATA_tab:\n        (input) 3 column list of swaths in ML_DATA burst geometry listed in the order from near to far range\n            DATA_tab line entries:   DATA   MLI_par  TOPS_par\n            * NOTE: The data type (FLOAT or FCOMPLEX) is specified in the MLI_par and the burst parameters (TOPS_par) must agree\n    \n    mosaic:\n        (output) mosaic image from bursts in multi-look geometry\n    MLI_par:\n        (output) mosaic image parameter file\n    mflg:\n        mosaicking option flag (enter - for default)\n            * 0: no overlap between bursts or image swaths (default)\n            * 1: average data in the overlap between bursts and in the overlap between image swaths\n            * 2: average data in the overlap between bursts but not in the overlap between image swaths\n    \n    data_tab_ref:\n        (input) reference scene DATA_tab, 3 column list of DATA, MLI_par, TOPS_par listed in order from near to far range (enter - for none)\n            * NOTE: When generating a mosaic produced using data from a resampled scene, the MLI_tab of the reference scene is 
required\n    \n    min_ovr:\n        minimum number of overlapping bursts (using mflg = 1 or 2, enter - for default: 1)\n    max_ovr:\n        maximum number of overlapping bursts (using mflg = 1 or 2, enter - for default: unlimited)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_burst_to_mosaic', DATA_tab, mosaic, MLI_par, mflg, data_tab_ref, min_ovr, max_ovr]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_full_aperture_SLC(SLC1_tab, SLC2_tab, SLCR_tab='-', SLC2_dir='-', vmode='-', wflg='-', imode='-', order='-', n_ovr='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate continuous SLC data from ScanSAR burst data (Sentinel-1, RCM, and TSX)\n    | Copyright 2023, Gamma Remote Sensing v1.9 18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    SLC1_tab:\n        (input) 3 column list of ScanSAR SLC swaths listed in order from near to far range\n            SLC1_tab line entries:   SLC   SLC_par  TOPS_par\n    SLC2_tab:\n        (input/output) 3 column list of oversampled continuous SLC swaths listed in order from near to far range\n            SLC2_tab line entries:   SLC   SLC_par\n            * NOTE: if the SLC2_tab does not yet exist, the file entries will be created with names derived from the SLC1_tab entries\n    \n    SLCR_tab:\n        (input) 3 column list of the reference scene with swaths, listed in order from near to far range (enter - for none)\n            SLCR_tab line entries:   SLC    SLC_par   TOPS_par\n    SLC2_dir:\n        directory for output oversampled continuous SLC, ignored if the SLC2_tab already exists (enter - or . 
for the current directory)\n    vmode:\n        sample validity mode (enter - for default):\n            * 0: all data in the burst are considered valid (default)\n            * 1: interpolate samples between the valid data bounds of the burst\n    \n    wflg:\n        burst window calculation flag (enter - for default):\n            * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)\n            * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks\n    \n    imode:\n        interpolation mode (enter - for default):\n            * 0: Lanczos (default)\n            * 1: B-spline\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 5)\n            dtype     output data type, (enter - for default: same as input data):\n            * 0: FCOMPLEX\n            * 1: SCOMPLEX\n    \n    n_ovr:\n        SLC oversampling factor, must be in the range 2 --> 32 (enter - for default: automatically calculated)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_full_aperture_SLC', SLC1_tab, SLC2_tab, SLCR_tab, SLC2_dir, vmode, wflg, imode, order, n_ovr]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_mosaic_to_burst(DATA, MLI_par, DATA_tab, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Resample image data in the MLI mosaic geometry to burst MLI geometry (FLOAT or FCOMPLEX)\n    | Copyright 2023, Gamma Remote Sensing v1.5 3-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    DATA:\n        (input) data in mosaic geometry (FLOAT or FCOMPLEX data type)\n    MLI_par:\n        image parameter file in mosaic 
geometry\n    DATA_tab:\n        3 column list of the output data in burst geometry, swaths are in order from near to far range\n            MLI_tab line entries:  DATA   MLI_par  TOPS_par\n            \n            * NOTE: 1.The burst MLI_par and TOPS_par files describing the output geometry must already exist\n              2.The data type (FLOAT or FCOMPLEX) specified in the MLI_par and the burst parameters (TOPS_par) must agree\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/ScanSAR_mosaic_to_burst', DATA, MLI_par, DATA_tab]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef slant_range(SLC_par, slr, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate slant range for every range sample\n    | Copyright 2022, Gamma Remote Sensing v1.2 8-Nov-2022 cw\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) SLC or MLI image parameter file\n    slr:\n        (output) slant range for every sample in the image (float)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/slant_range', SLC_par, slr]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_adf(SLC, ref_SLC, ref_SLC_par, SLC_filt, mode='-', alpha='-', nfft_r='-', nfft_az='-', r_step='-', az_step='-', mwin_r='-', mwin_az='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Adaptive filtering of SLC data based on the local PSD of a reference SLC image\n    | Copyright 2023, Gamma Remote Sensing, v1.4 
18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC to be filtered (FCOMPLEX or SCOMPLEX)\n    ref_SLC:\n        (input) reference SLC\n    ref_SLC_par:\n        (input) reference SLC parameter file\n    SLC_filt:\n        (output) output filtered SLC using the power spectrum of the reference SLC\n    mode:\n        SLC filtering mode (enter - for default):\n            * 0: 1D range PSD filter\n            * 1: 1D azimuth PSD filter\n            * 2: 2D range PSD \\\\* azimuth PSD filter \n            * 3: 2D median-filtered PSD filtering (default)\n    \n    alpha:\n        exponent to apply to PSD value (enter - for default: 0.30)\n    nfft_r:\n        range filter FFT window size, 2\\\\*\\\\*N, 16->1024, (enter - for default: 128)\n    nfft_az:\n        azimuth filter FFT window size, 2\\\\*\\\\*N, 16->1024, (enter - for default: 128)\n    r_step:\n        range processing step (enter - for default: nfft_r/4)\n    az_step:\n        azimuth processing step (enter - for default: nfft_az/4)\n    mwin_r:\n        range median window size for median PSD filtering (enter - for default: 5)\n    mwin_az:\n        azimuth median window size for median PSD filtering (enter - for default: 5)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_adf', SLC, ref_SLC, ref_SLC_par, SLC_filt, mode, alpha, nfft_r, nfft_az, r_step, az_step, mwin_r, mwin_az]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_cat(SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, SLC3, SLC3_par, dopflg='-', iflg='-', phflg='-', gainflg='-', imode='-', order='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Concatenate a pair of SLC images with interpolation of the second 
scene\n    | Copyright 2024, Gamma Remote Sensing, v2.8 18-Jul-2024 clw/cm\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) SLC1 image (FCOMPLEX or SCOMPLEX)\n    SLC2:\n        (input) SLC2 image to be appended to SLC1 (same type as SLC1)\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset parameter file containing offset polynomials between SLC1 and SLC2\n    SLC3:\n        (output) concatenated SLC\n    SLC3_par:\n        (output) ISP image parameter file for concatenated image\n    dopflg:\n        Doppler flag (enter - for default)\n            * 0: ignore Doppler centroid information, assume 0 Hz Doppler centroid\n            * 1: use Doppler centroid information for interpolation (default)\n    \n    iflg:\n        input data type flag (enter - for default)\n            * 0: input data are SLC images, use data type specified in SLC_par files (SCOMPLEX or FCOMPLEX) (default)\n            * 1: input scenes are interferograms, force FCOMPLEX data type\n    \n    phflg:\n        phase offset correction flag (enter - for default)\n            * 0: no phase offset correction for SLC2 (default)\n            * 1: apply constant phase offset correction to SLC2\n    \n    gainflg:\n        gain correction flag (enter - for default)\n            * 0: no gain correction for SLC2 (default)\n            * 1: apply gain correction to SLC2 using calibration gain values in parameter files\n            * 2: apply gain correction to SLC2 using relative intensity of overlap areas\n    \n    imode:\n        interpolation mode for SLC2 (enter - for default)\n            * 0: Lanczos interpolation (default)\n            * 1: B-spline interpolation\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n   
     the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_cat', SLC1, SLC2, SLC1_par, SLC2_par, OFF_par, SLC3, SLC3_par, dopflg, iflg, phflg, gainflg, imode, order]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_cat_ScanSAR(SLC_tab1, SLC_tab2, SLC_tab3, bin_flag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Concatenate sequential ScanSAR burst SLC images\n    | Copyright 2024, Gamma Remote Sensing v3.5 5-Mar-2024 clw/cm\n    \n    Parameters\n    ----------\n    SLC_tab1:\n        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range (earlier time)\n            SLC_tab line entries:   SLC   SLC_par  TOPS_par\n    SLC_tab2:\n        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range (later time)\n            SLC_tab line entries:   SLC   SLC_par  TOPS_par\n    SLC_tab3:\n        (input) 3 column list of concatenated ScanSAR SLC, swaths are listed in order from near to far range\n            SLC_tab line entries:   SLC   SLC_par  TOPS_par\n    bin_flag:\n        binary data flag (enter - for default)\n            * 0: no binary data generated (concatenate parameter files only)\n            * 1: binary data generated (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_cat_ScanSAR', SLC_tab1, SLC_tab2, SLC_tab3, bin_flag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_copy(SLC_in, SLC_par_in, SLC_out, SLC_par_out, fcase='-', sc='-', roff='-', nr='-', loff='-', nl='-', swap='-', 
header_lines='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Copy SLC with options for data format conversion, segment extraction, swap real and imaginary, swap near and far range,  and azimuth spectrum shift\n    | Copyright 2023, Gamma Remote Sensing, v6.1 1-May-2023 uw/clw/cm/of\n    \n    Parameters\n    ----------\n    SLC_in:\n        (input) SLC (FCOMPLEX or SCOMPLEX format)\n    SLC_par_in:\n        (input) ISP SLC parameter file for input SLC\n    SLC_out:\n        (output) selected SLC section (FCOMPLEX or SCOMPLEX format)\n    SLC_par_out:\n        (output) ISP SLC parameter file of output SLC\n    fcase:\n        data format conversion (enter - for default: output format = input format)\n            * 1: FCOMPLEX --> FCOMPLEX (default sc = 1.0)\n            * 2: FCOMPLEX --> SCOMPLEX (default sc = 10000.0)\n            * 3: SCOMPLEX --> FCOMPLEX (default sc = 0.0001)\n            * 4: SCOMPLEX --> SCOMPLEX (default sc = 1.0)\n    \n    sc:\n        scale factor for input SLC data (enter - for default)\n    roff:\n        offset to starting range sample (enter - for default: 0)\n    nr:\n        number of range samples (enter - for default: to end of line)\n    loff:\n        offset to starting line (enter - for default: 0)\n    nl:\n        number of lines to copy (enter - for default: to end of file)\n    swap:\n        swap data (enter - for default)\n            * 0: normal (default)\n            * 1: swap real/imaginary part of complex data\n            * 2: swap left/right (near/far range)\n            * 3: shift the SLC azimuth spectrum by 1/2 the azimuth sample rate\n    \n    header_lines:\n        number of input file header lines (enter - for default: 0)\n            * NOTE: CEOS format SLC data have 1 header line\n            * NOTE: file offset pointer size (bytes): 8\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command 
in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_copy', SLC_in, SLC_par_in, SLC_out, SLC_par_out, fcase, sc, roff, nr, loff, nl, swap, header_lines]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_copy_ScanSAR(SLC1_tab, SLC2_tab, BURST_tab, dtype='-', SLC2_dir='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Burst selection and copy from ScanSAR burst data (FCOMPLEX, SCOMPLEX)\n    | Copyright 2024, Gamma Remote Sensing v3.7 29-Feb-2024 clw/cm\n    \n    Parameters\n    ----------\n    SLC1_tab:\n        (input) 3 column list of ScanSAR SLC1 swaths in order from near to far range\n            SLC1_tab line entries:   SLC    SLC_par   TOPS_par\n    SLC2_tab:\n        (input/output) 3 column list of the burst data copied from the ScanSAR swaths listed in SLC1_tab, in order from near to far range\n            SLC2_tab line entries:   SLC    SLC_par   TOPS_par\n            \n            * NOTE: If the SLC2_tab does not yet exist, the SLC2_tab will be created with file names derived from the SLC1_tab entries and the SLC2_dir\n              The new file names will have _2 appended to the root file names of the entries in SLC1_tab\n    BURST_tab:\n        (input) 2 column list of the first and last burst to copy from each swath, one line for each swath\n            BURST_tab line entries: first_burst  last_burst\n            NOTES: 1. The first burst is 1, enter - to select last physical burst\n            2. If first_burst <= 0, then blank bursts are generated at the start of the output swath\n            3. 
If last_burst exceeds the number of bursts, then blank bursts are appended to the end of the output swath\n    dtype:\n        output data format for complex data (enter - for default: output data has the same format as input data):\n            * 0: FCOMPLEX\n            * 1: SCOMPLEX\n    \n    SLC2_dir:\n        directory for ScanSAR burst data copied from SLC1 data, ignored if the SLC2_tab already exists (enter - for default: current directory)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_copy_ScanSAR', SLC1_tab, SLC2_tab, BURST_tab, dtype, SLC2_dir]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_corners(SLC_par, terra_alt='-', kml='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate SLC/MLI image corners in geodetic latitude and longitude (deg.)\n    | Copyright 2022, Gamma Remote Sensing, v2.2 8-Nov-2022 clw/awi/cm\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input) ISP SLC/MLI image parameter file\n    terra_alt:\n        (input) average terrain altitude (enter - for default: 300.000 meters)\n    kml:\n        (output) kml output file (enter - for none)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_corners', SLC_par, terra_alt, kml]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_deramp(SLC1, SLC1_par, SLC2, SLC2_par, mode, dop_ph='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate and subtract 
Doppler phase from an SLC image\n    | Copyright 2023, Gamma Remote Sensing, v1.7 18-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) SLC data file (FCOMPLEX or SCOMPLEX format)\n    SLC1_par:\n        (input) SLC parameter file with Doppler information\n    SLC2:\n        (output) SLC with Doppler phase removed (or added)\n    SLC2_par:\n        (output) SLC parameter file for the output SLC\n    mode:\n        mode of operation:\n            * 0: subtract Doppler phase ramp (deramp)\n            * 1: add Doppler phase ramp (reramp)\n    \n    dop_ph:\n        (output) Doppler phase (FLOAT) (enter - for none)\n            Note: SLC1_par contains the Doppler polynomial that is used to calculate the Doppler phase ramp\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_deramp', SLC1, SLC1_par, SLC2, SLC2_par, mode, dop_ph]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_deramp_ScanSAR(SLC1_tab, SLC2_tab, mode, phflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate and subtract ScanSAR or TOPS Doppler phase from burst SLC data\n    | Copyright 2023, Gamma Remote Sensing, v2.1 18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    SLC1_tab:\n        (input) 3 column list of input ScanSAR SLC, swaths are listed in order from near to far range:\n            SLC_tab line entries:   SLC    SLC_par   TOPS_par\n    SLC2_tab:\n        (input) 3 column list of output ScanSAR SLC, swaths are listed in order from near to far range\n    mode:\n        mode of operation:\n            * 0: subtract ScanSAR Doppler phase (deramp)\n            * 1: add Doppler phase ramp (reramp)\n    \n    phflg:\n        deramp phase flag 
(enter - for default)\n            * 0: do not save ScanSAR Doppler phase (default)\n            * 1: save ScanSAR Doppler phase, output filename is the same as the deramped SLC with extension .dph\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_deramp_ScanSAR', SLC1_tab, SLC2_tab, mode, phflg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_deskew(SLC1, SLC1_par, SLC2, SLC2_par, mode='-', interp='-', order='-', deramp='-', ph_corr='-', sr0='-', sr2='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Change geometry from Doppler centroid to zero-Doppler (deskew) or vice-versa\n    | Copyright 2024, Gamma Remote Sensing, v1.6 17-Oct-2024 cm/clw/uw\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) SLC image file (FCOMPLEX or SCOMPLEX format)\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2:\n        (output) SLC image file in new geometry\n    SLC2_par:\n        (output) SLC2 ISP image parameter file\n    mode:\n        mode of operation (enter - for default)\n            * 0: change geometry from Doppler centroid to zero-Doppler (deskew, default)\n            * 1: change geometry from zero-Doppler to Doppler centroid (reskew)\n    \n    interp:\n        interpolation method (enter - for default)\n            * 0: Lanczos interpolation (default)\n            * 1: B-spline interpolation\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)\n    deramp:\n        deramp flag (enter - for default)\n            * 0: do not deramp and reramp data\n            * 1: deramp data before interpolation and reramp afterwards (default)\n    \n    ph_corr:\n        range shift 
phase correction flag (enter - for default)\n            * 0: do not correct phase related to range shift\n            * 1: correct phase related to range shift (default)\n    \n    sr0:\n        near range distance of the resampled image in meter (enter - for default: calculated from input)\n    sr2:\n        far range distance of the resampled image in meter (enter - for default: calculated from input)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_deskew', SLC1, SLC1_par, SLC2, SLC2_par, mode, interp, order, deramp, ph_corr, sr0, sr2]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_freq_shift(SLC, SLC_par, SLC_shift, SLC_shift_par, freq_shift, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP Program GAMMA_SOFTWARE-20250625/ISP/bin/SLC_freq_shift\n    | Shift the effective radar carrier frequency of an SLC image by a specified amount\n    | Copyright 2022, Gamma Remote Sensing, v1.1 8-Nov-2022 clw\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC file (FCOMPLEX or SCOMPLEX)\n    SLC_par:\n        (input) SLC parameter file\n    SLC_shift:\n        (output) SLC data with shifted radar carrier frequency\n    SLC_shift_par:\n        (output) SLC parameter file with shifted radar carrier frequency\n    freq_shift:\n        radar carrier frequency shift (Hz)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_freq_shift', SLC, SLC_par, SLC_shift, SLC_shift_par, freq_shift]\n    process(cmd, 
logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_interp(SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R, SLC2R_par, loff='-', nlines='-', mode='-', order='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | SLC complex image resampling using 2-D Lanczos or B-spline interpolation\n    | Copyright 2023, Gamma Remote Sensing, v4.9 18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    SLC2:\n        (input) SLC2 image to be resampled to the geometry of the SLC1 reference image\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    SLC2R:\n        (output) single-look complex image 2 coregistered to SLC1\n    SLC2R_par:\n        (output) SLC2R ISP image parameter file for coregistered image\n    loff:\n        offset to first valid output line (in SLC1 lines) (enter - for default: 0)\n    nlines:\n        number of valid output lines (enter - or 0 for default: to end of file)\n    mode:\n        interpolation mode (enter - for default)\n            * 0: Lanczos (default)\n            * 1: B-spline\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_interp', SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R, SLC2R_par, loff, nlines, mode, order]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_interp_map(SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R, SLC2R_par, OFF_par2, coffs_sm, loff='-', nlines='-', mode='-', order='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | SLC image resampling using a 
2-D offset map\n    | Copyright 2024, Gamma Remote Sensing, v4.3 22-Aug-2024 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC2:\n        (input) SLC2 image to be resampled to the SLC1 reference image\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2_par:\n        (input) SLC2 ISP image parameter file\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    SLC2R:\n        (output) single-look complex image 2 coregistered to SLC1\n    SLC2R_par:\n        (output) SLC2R ISP image parameter file for co-registered image\n    OFF_par2:\n        (input) ISP offset/interferogram parameter file used for residual offsets map (coffs_sm)\n    coffs_sm:\n        (input) smoothed residual range and azimuth offsets (fcomplex)\n    loff:\n        offset to first valid output line (in SLC1 lines) (enter - for default: 0)\n    nlines:\n        number of valid output lines (enter - or 0 for default: to end of file)\n    mode:\n        interpolation mode (enter - for default)\n            * 0: Lanczos (default)\n            * 1: B-spline\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_interp_map', SLC2, SLC1_par, SLC2_par, OFF_par, SLC2R, SLC2R_par, OFF_par2, coffs_sm, loff, nlines, mode, order]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_interp_ScanSAR(SLC2_tab, SLC2_par, SLC1_tab, SLC1_par, OFF_par, SLC2R_tab, SLC2R='-', SLC2R_par='-', mode='-', order='-', SLC2R_dir='-', burst_check='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Resample ScanSAR burst mode SLC using global offset polynomial\n    | 
Copyright 2025, Gamma Remote Sensing v4.4 28-Jan-2025 clw/cm\n    \n    Parameters\n    ----------\n    SLC2_tab:\n        (input) 3 column list of ScanSAR SLC2 swaths to be resampled into the geometry of SLC1 listed in order from near to far range\n            SLC2_tab line entries:   SLC    SLC_par   TOPS_par\n    SLC2_par:\n        (input) SLC parameter file of ScanSAR SLC2 mosaic, SLC2 is generated from the ScanSAR swaths listed in SLC2_tab\n    SLC1_tab:\n        (input) 3 column list of the reference ScanSAR SLC swaths listed in order from near to far range\n    SLC1_par:\n        (input) SLC parameter file of the reference ScanSAR SLC1 mosaic, SLC1 is generated from the ScanSAR swaths listed in SLC1_tab\n    OFF_par:\n        (input) global ISP offset and interferogram parameter file, the offset model is determined from the ScanSAR SLC mosaics\n            * NOTE: The OFF_par specifies the number of range and azimuth looks required to determine valid data bounds (burst windows)\n    \n    SLC2R_tab:\n        (input/output) 3 column list of the resampled ScanSAR SLC2 swaths listed in order from near to far range\n            * NOTE: If the SLC2R_tab does not yet exist, the entries will be created with file names derived from the filenames in SLC2_tab and the SLC2R_dir\n              The file extensions of the new entries are changed from slc to rslc\n    SLC2R:\n        (output) mosaic generated from the resampled swaths listed in SLC2R_tab, coregistered to the reference mosaic of SLC1 (enter - for none)\n    SLC2R_par:\n        (output) SLC parameter file associated with the mosaic created from the resampled swaths SLC2R (enter - for none)\n    mode:\n        complex data interpolation mode (enter - for default)\n            * 0: Lanczos (default)\n            * 1: B-spline\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)\n    SLC2R_dir:\n        directory for resampled burst SLC2R data, ignored if the 
DIFF_tab already exists (enter - for default: current directory)\n    burst_check:\n        check and update burst parameters to match actual data (enter - for default)\n            * 0: no (default)\n            * 1: yes\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_interp_ScanSAR', SLC2_tab, SLC2_par, SLC1_tab, SLC1_par, OFF_par, SLC2R_tab, SLC2R, SLC2R_par, mode, order, SLC2R_dir, burst_check]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_intf(SLC1, SLC2R, SLC1_par, SLC2R_par, OFF_par, interf, rlks, azlks, loff='-', nlines='-', sps_flg='-', azf_flg='-', rp1_flg='-', rp2_flg='-', SLC1s='-', SLC2Rs='-', SLC_1s_par='-', SLC_2Rs_par='-', az_beta='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate interferogram from co-registered SLC image data\n    | Copyright 2024, Gamma Remote Sensing, v6.3 8-Mar-2024 clw/uw/cm\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2R:\n        (input) single-look complex image 2 coregistered to SLC1\n    SLC1_par:\n        (input) SLC1 ISP image parameter file\n    SLC2R_par:\n        (input) SLC2R ISP image parameter file for the co-registered image\n    OFF_par:\n        (input) ISP offset/interferogram parameter file\n    interf:\n        (output) interferogram from SLC1 and SLC2R\n    rlks:\n        number of range looks\n    azlks:\n        number of azimuth looks\n    loff:\n        offset to starting line relative to SLC1 for interferogram (enter - for default: 0)\n    nlines:\n        number of SLC lines to process (enter - for default: to end of file)\n    sps_flg:\n        range spectral shift flag (enter - for default)\n            * 
1: apply range spectral shift filter (default)\n            * 0: do not apply range spectral shift filter\n    \n    azf_flg:\n        azimuth common band filter flag (enter - for default)\n            * 1: apply azimuth common-band filter (default)\n            * 0: do not apply azimuth common band filter\n    \n    rp1_flg:\n        SLC1 image range phase mode (enter - for default)\n            * 0: non-zero Doppler geometry\n            * 1: zero-Doppler geometry (default)\n    \n    rp2_flg:\n        SLC2 image range phase mode (enter - for default)\n            * 0: non-zero Doppler geometry\n            * 1: zero-Doppler geometry (default)\n    \n    SLC1s:\n        SLC1 after range spectral shift and azimuth common-band filtering (FCOMPLEX format) (enter - for none)\n    SLC2Rs:\n        SLC2R after range spectral shift and azimuth common-band filtering (FCOMPLEX format) (enter - for none)\n    SLC_1s_par:\n        SLC1s ISP image parameter file (enter - for none)\n    SLC_2Rs_par:\n        SLC2Rs ISP image parameter file (enter - for none)\n    az_beta:\n        azimuth common-band filter Kaiser window parameter (enter - for default: 2.120)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_intf', SLC1, SLC2R, SLC1_par, SLC2R_par, OFF_par, interf, rlks, azlks, loff, nlines, sps_flg, azf_flg, rp1_flg, rp2_flg, SLC1s, SLC2Rs, SLC_1s_par, SLC_2Rs_par, az_beta]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_intf2(SLC1, SLC2R, SLC1_par, SLC2R_par, MLI1, MLI2R, MLI1_par, MLI2R_par, interf, cc, r_dec, az_dec, rwin='-', azwin='-', wflg='-', n_ovr='-', sim_phase='-', lanczos='-', beta='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate interferogram 
and MLI images from SLCs with separate averaging window dimensions and decimation factors\n    | Copyright 2025, Gamma Remote Sensing, v2.3 20-May-2025 clw/cm/of\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) single-look complex image 1 (reference)\n    SLC2R:\n        (input) single-look complex image 2 coregistered to SLC1\n    SLC1_par:\n        (input) SLC1 image parameter file\n    SLC2R_par:\n        (input) SLC2R image parameter file for the co-registered image\n    MLI1:\n        (output) multi-look intensity image derived from SLC1 (enter - for none)\n    MLI2R:\n        (output) multi-look intensity image derived from SLC2R (enter - for none)\n    MLI1_par:\n        (output) MLI image parameter file derived from SLC1_par (enter - for none)\n    MLI2R_par:\n        (output) MLI image parameter file derived from SLC2R_par (enter - for none)\n    interf:\n        (output) complex interferogram from SLC1 and SLC2R  (enter - for none)\n    cc:\n        (output) interferometric correlation magnitude of SLC1 and SLC2R (enter - for none)\n    r_dec:\n        range decimation factor (int)\n    az_dec:\n        azimuth decimation factor (int)\n    rwin:\n        averaging window width (int) (enter - for default: r_dec)\n    azwin:\n        averaging window height (int) (enter - for default: az_dec)\n    wflg:\n        window weighting function (enter - for default):\n            * 0: rectangular (default)\n            * 1: Kaiser\n            * 2: circular Gaussian \n    \n    n_ovr:\n        oversampling factor 1 -> 2 (enter - for default: 1)\n    sim_phase:\n        (input) simulated interferometric phase, coregistered MLI1 (FLOAT, enter - for none)\n    lanczos:\n        Lanczos interpolator order 5 -> 9 (enter - for default: 7)\n    beta:\n        Gaussian or Kaiser window parameter (enter - for default: 2.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute 
the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_intf2', SLC1, SLC2R, SLC1_par, SLC2R_par, MLI1, MLI2R, MLI1_par, MLI2R_par, interf, cc, r_dec, az_dec, rwin, azwin, wflg, n_ovr, sim_phase, lanczos, beta]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_mosaic_range(SLC_tab, SLC, SLC_par, mode='-', order='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate mosaic of Stripmap SLC data provided in multiple pieces in range direction (e.g. PALSAR-3)\n    | Copyright 2025, Gamma Remote Sensing v1.1 5-Feb-2025 cm/clw/uw\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) 2 column list of Stripmap SLC pieces (from near to far range)\n            SLC_tab line entries:   SLC   SLC_par\n    SLC:\n        (output) SLC mosaic image\n    SLC_par:\n        (output) SLC mosaic image parameter file\n    mode:\n        complex data interpolation mode in range (enter - for default)\n            * 0: Lanczos (default)\n            * 1: B-spline\n            * 2: nearest neighbor\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_mosaic_range', SLC_tab, SLC, SLC_par, mode, order]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_mosaic_ScanSAR(SLC_tab, SLC, SLC_par, rlks, azlks, bflg='-', SLCR_tab='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Calculate SLC mosaic of ScanSAR SLC burst data (Sentinel-1, TerraSAR-X, RCM...)\n    | Copyright 2025, Gamma Remote Sensing v5.0 
14-Jan-2025 clw/awi/cm\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) 3 column list of ScanSAR SLC, swaths are listed in order from near to far range\n            SLC_tab line entries:   SLC   SLC_par   TOPS_par\n    SLC:\n        (output) SLC mosaic image\n    SLC_par:\n        (output) SLC mosaic image parameter file\n    rlks:\n        number of range looks used to determine burst window boundaries for the mosaic\n    azlks:\n        number of azimuth looks used to determine burst window boundaries for the mosaic\n    bflg:\n        burst window calculation flag (enter - for default)\n            * 0: use existing burst window parameters if they exist, otherwise calculate burst window parameters (default)\n            * 1: calculate burst window parameters from burst parameters and the number of range and azimuth looks\n    \n    SLCR_tab:\n        (input) 3 column list of the reference scene, swaths are listed in order from near to far range (enter - for none)\n            SLCR_tab line entries:   SLC   SLC_par   TOPS_par\n            * NOTE: When generating a mosaic of a resampled SLC, the SLC_tab of the reference scene is required\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_mosaic_ScanSAR', SLC_tab, SLC, SLC_par, rlks, azlks, bflg, SLCR_tab]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_ovr(SLC, SLC_par, SLC_ovr, SLC_ovr_par, r_ovr='-', az_ovr='-', mode='-', order='-', deramp='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Oversample SLC data in range and azimuth using 2-D Lanczos or B-spline interpolation\n    | Copyright 2024, Gamma Remote Sensing, v1.6 1-Feb-2024 clw/cm\n    \n    Parameters\n    ----------\n   
 SLC:\n        (input) SLC image  (FCOMPLEX or SCOMPLEX format)\n    SLC_par:\n        (input) SLC image parameter file\n    SLC_ovr:\n        (output) oversampled SLC image\n    SLC_ovr_par:\n        (output) oversampled SLC image parameter file\n    r_ovr:\n        range oversampling factor (enter - for default: 1.0)\n    az_ovr:\n        azimuth oversampling factor (enter - for default: 1.0)\n    mode:\n        interpolation mode (enter - for default)\n            * 0: Lanczos interpolation (default)\n            * 1: B-spline interpolation\n    \n    order:\n        Lanczos interpolator order / B-spline degree 4 -> 9 (enter - for default: 4)\n    deramp:\n        deramp flag (enter - for default)\n            * 0: do not deramp and reramp data\n            * 1: deramp data before interpolation and reramp afterwards (default)\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_ovr', SLC, SLC_par, SLC_ovr, SLC_ovr_par, r_ovr, az_ovr, mode, order, deramp]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_phase_shift(SLC1, SLC1_par, SLC2, SLC2_par, ph_shift, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Add a constant phase from an SLC image\n    | Copyright 2023, Gamma Remote Sensing, v1.3 24-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC1:\n        (input) SLC data file (fcomplex or scomplex format)\n    SLC1_par:\n        (input) SLC parameter file\n    SLC2:\n        (output) SLC with phase shift\n    SLC2_par:\n        (output) SLC parameter file for the output SLC\n    ph_shift:\n        phase shift to add to SLC phase (radians)\n            * NOTE: Used to apply a constant phase shift of -1.25 radians to Sentinel-1 TOPS SLC data\n       
       from swath IW1 acquired up to 10-Mar-2015.\n              Used to apply a constant phase shift of -3.83 radians to Sentinel-1 TOPS SLC data with\n              H-POL on receive (e.g. VH) acquired up to 10-Mar-2015.\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_phase_shift', SLC1, SLC1_par, SLC2, SLC2_par, ph_shift]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_RFI_filt(SLC, SLC_par, SLC_filt, rfi_thres='-', nfft_r='-', nfft_az='-', r_step='-', az_step='-', mwin_r='-', mwin_az='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Adaptive RFI filtering for SLC image using median spectral filtering\n    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC to be filtered (FCOMPLEX or SCOMPLEX)\n    SLC_par:\n        (input) reference SLC parameter file\n    SLC_filt:\n        (output) output filtered SLC using the power spectrum of the reference SLC\n    rfi_thres:\n        RFI threshold (enter - for default: 10.00)\n    nfft_r:\n        range filter FFT window size, 2\\\\*\\\\*N, 16->1024, (enter - for default: 128)\n    nfft_az:\n        azimuth filter FFT window size, 2\\\\*\\\\*N, 16->1024, (enter - for default: 128)\n    r_step:\n        range processing step (enter - for default: nfft_r/4)\n    az_step:\n        azimuth processing step (enter - for default: nfft_az/4)\n    mwin_r:\n        range median window size for median PSD filtering (enter - for default: 5)\n    mwin_az:\n        azimuth median window size for median PSD filtering (enter - for default: 5)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        
the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_RFI_filt', SLC, SLC_par, SLC_filt, rfi_thres, nfft_r, nfft_az, r_step, az_step, mwin_r, mwin_az]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_RFI_filt2(SLC, SLC_par, SLC_filt, rfi_thres='-', method='-', f_bs='-', bs_width='-', roff='-', nr='-', azoff='-', naz='-', pltflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | RFI filtering for SLC image using a band-stop filter\n    | Copyright 2024, Gamma Remote Sensing, v1.5 2-Feb-2024 cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC to be filtered (FCOMPLEX or SCOMPLEX)\n    SLC_par:\n        (input) reference SLC parameter file\n    SLC_filt:\n        (output) output filtered SLC (same format as SLC)\n    rfi_thres:\n        RFI threshold in dB above reference (enter - for default: auto)\n    method:\n        RFI detection method (enter - for default)\n            * 0: threshold above median\n            * 1: threshold using spectrum symmetry (default)\n    \n    f_bs:\n        center or seed frequency of band-stop filter in Hz (-fadc/2.0 <= f_bs < fadc/2.0, enter - for default: auto)\n    bs_width:\n        width of band-stop filter in Hz (enter - for default: auto)\n    roff:\n        offset to starting range sample to filter (enter - for default: 0)\n    nr:\n        number of range samples to filter (enter - for default: to end of line)\n    azoff:\n        offset to starting azimuth line to filter (enter - for default: 0)\n    naz:\n        number of azimuth lines to filter (enter - for default: to end of file)\n    pltflg:\n        range spectrum plotting flag (enter - for default)\n            * 0: none\n            * 1: output plot in PNG format (default)\n            * 2: screen output plot\n    \n    logpath: str or None\n        a 
directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SLC_RFI_filt2', SLC, SLC_par, SLC_filt, rfi_thres, method, f_bs, bs_width, roff, nr, azoff, naz, pltflg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef split_WB(data_in, data_par_in, data_tab, dtype, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP: Program GAMMA_SOFTWARE-20250625/ISP/bin/split_WB\n    | Split WB mosaic image into individual beams using ISP parameter files\n    | Copyright 2022, Gamma Remote Sensing, v1.4 8-Nov-2022 clw/cm\n    \n    Parameters\n    ----------\n    data_in:\n        (input) input mosaicked data in slant-range geometry (e.g. DEM data)\n    data_par_in:\n        (input) ISP image parameter file for data in the input mosaic\n    data_tab:\n        (input) 2 column list of output data filenames and ISP image parameter files for each beam in the mosaic (text)\n    dtype:\n        (input) input data type:\n            * 0: FLOAT\n            * 1: FCOMPLEX\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/split_WB', data_in, data_par_in, data_tab, dtype]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SR_to_GRD(MLI_par, OFF_par, GRD_par, in_file, out_file, rlks='-', azlks='-', interp_mode='-', grd_rsp='-', grd_azsp='-', degree='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Conversion to ground range for ISP MLI and INSAR data of type FLOAT\n    | Copyright 2023, Gamma Remote Sensing, v2.5 18-Apr-2023 uw/clw/cm\n    
\n    Parameters\n    ----------\n    MLI_par:\n        (input) MLI image parameter file of the slant-range image\n    OFF_par:\n        (input) ISP OFF_par of the input image (enter - when the image geometry specified by the MLI_par)\n    GRD_par:\n        (input/output) image parameter file of output ground range image\n    in_file:\n        (input) slant range image (FLOAT)\n    out_file:\n        (output) ground range image (FLOAT)\n    rlks:\n        multi-looking in range (prior to resampling, enter - for default: 1)\n    azlks:\n        multi-looking in azimuth (prior to resampling, enter - for default: 1)\n    interp_mode:\n        interpolation mode (enter - for default)\n            * 0: nearest-neighbor\n            * 1: bicubic spline\n            * 2: bicubic spline log(x)\n            * 3: bicubic spline sqrt(x)\n            * 4: B-spline interpolation (default B-spline degree: 3)\n            * 5: B-spline interpolation sqrt(x) (default) (default B-spline degree: 3)\n            * NOTE: log and sqrt interpolation modes should only be used with non-negative data!\n    \n    grd_rsp:\n        output image ground range sample spacing (m) (enter - for default: (input image azimuth spacing) \\\\* azlks)\n    grd_azsp:\n        output image azimuth sample spacing (m) (enter - for default: (input image azimuth spacing) \\\\* azlks)\n    degree:\n        B-spline degree (2->9) (enter - for default: 3)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/SR_to_GRD', MLI_par, OFF_par, GRD_par, in_file, out_file, rlks, azlks, interp_mode, grd_rsp, grd_azsp, degree]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef subtract_phase(interf_in, phase_file, interf_out, width, factor='-', 
logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ISP: Program GAMMA_SOFTWARE-20250625/ISP/bin/subtract_phase\n    | Subtract scaled phase image from a complex interferogram\n    | Copyright 2023, Gamma Remote Sensing, v3.3 19-Apr-2023 uw/clw\n    \n    Parameters\n    ----------\n    interf_in:\n        (input) input interferogram (FCOMPLEX)\n    phase_file:\n        (input) unwrapped interferometric phase (FLOAT)\n    interf_out:\n        (output) output interferogram (input interferogram - scaled phase) (FCOMPLEX)\n    width:\n        number of samples/line\n    factor:\n        constant scale factor for input phase data (enter - for default: 1.0)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/subtract_phase', interf_in, phase_file, interf_out, width, factor]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef tree_cc(flag, width, mbl='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Phase unwrapping tree generation with low correlation search (modified ARW algorithm)\n    | Copyright 2023, Gamma Remote Sensing, v3.1 18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    flag:\n        (input) phase unwrapping flag file\n    width:\n        number of samples/row\n    mbl:\n        maximum branch length (enter - for default: 32, maximum=64) \n    xmin:\n        starting range pixel offset (enter - for default: 0)\n    xmax:\n        last range pixel offset (enter - for default: width-1)\n    ymin:\n        starting azimuth row, relative to start (enter - for default: 0)\n    ymax:\n        last azimuth row, relative to start (enter - for default: nlines-1)\n    logpath: str or None\n        a directory 
to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/tree_cc', flag, width, mbl, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef tree_gzw(flag, width, mbl='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Phase unwrapping tree generation (GZW algorithm)\n    | Copyright 2023, Gamma Remote Sensing, v3.8 18-Apr-2023 clw/uw\n    \n    Parameters\n    ----------\n    flag:\n        (input) phase unwrapping flag file\n    width:\n        number of samples/row\n    mbl:\n        maximum branch length (enter - for default: 32)\n    xmin:\n        starting range pixel offset (enter - for default: 0)\n    xmax:\n        last range pixel offset (enter - for default: width-1)\n    ymin:\n        starting azimuth row, relative to start (enter - for default: 0)\n    ymax:\n        last azimuth row, relative to start (enter - for default: nlines-1)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/tree_gzw', flag, width, mbl, xmin, xmax, ymin, ymax]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef unw_model(interf, unw_model, unw, width, xinit='-', yinit='-', ref_ph='-', width_model='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Phase unwrapping using a model of the unwrapped phase\n    | Copyright 2023, Gamma Remote Sensing, v1.9 21-Sep-2023 clw/uw\n    \n    Parameters\n    ----------\n    interf:\n        (input) complex interferogram\n    unw_model:\n        
(input) approximate unwrapped phase model (float)\n    unw:\n        (output) unwrapped phase (float)\n    width:\n        number of samples/row of the interferogram\n    xinit:\n        offset to phase reference location in range (col) (enter - for default: 0)\n    yinit:\n        offset to phase reference location in azimuth (row) (enter - for default: 0)\n    ref_ph:\n        reference point phase (radians) (enter - for phase at the reference point)\n    width_model:\n        number of samples/row of the unwrapped phase model (enter - for default: interferogram width)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/bin/unw_model', interf, unw_model, unw, width, xinit, yinit, ref_ph, width_model]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef bpf_ssi(SLC, SLC_par, SLC_flow, SLC_flow_par, SLC_fhigh, SLC_fhigh_par, rbs='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | bpf_ssi: Apply band-pass filtering for split-spectrum interferometry\n    | Copyright 2023 Gamma Remote Sensing, v1.4 19-Apr-2023 uw/cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC (FCOMPLEX or SCOMPLEX, SLC should not be resampled)\n    SLC_par:\n        (input) SLC parameter file\n    SLC_flow:\n        (output) low frequency band filtered SLC (FCOMPLEX or SCOMPLEX)\n    SLC_flow_par:\n        (output) low frequency band filtered SLC parameter file\n    SLC_fhigh:\n        (output) high frequency band filtered SLC (FCOMPLEX or SCOMPLEX)\n    SLC_fhigh_par:\n        (output) high frequency band filtered SLC parameter file (FCOMPLEX or SCOMPLEX)\n    rbs:\n        relative range spectrum band separation (enter - for default: 0.6666 --> lowest and highest third of processing 
bandwidth)\n            indicate - for the output files to only calculate filtering parameters\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/bpf_ssi', SLC, SLC_par, SLC_flow, SLC_flow_par, SLC_fhigh, SLC_fhigh_par, rbs]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef bpf_ssi_S1(SLC_tab, SLC_tab_flow, SLC_tab_high, rbs='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | bpf_ssi_S1: Apply band-pass filtering for split-spectrum interferometry for S1 TOPS data\n    | Copyright 2023 Gamma Remote Sensing, v1.2 19-Apr-2023 uw/cm\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) SLC_tab\n    SLC_tab_flow:\n        (output) output SLC_tab filename for low frequency band filtered SLC\n    SLC_tab_high:\n        (output) output SLC_tab filename for high frequency band filtered SLC\n    rbs:\n        relative range spectrum band separation (enter - for default: 0.6666 --> lowest and highest third of processing bandwidth)\n            indicate - for the output files to only calculate filtering parameters\n            The filename in SLC_tab_flow and SLC_tab_high are automatically generated by adding .flow and .fhigh\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/bpf_ssi_S1', SLC_tab, SLC_tab_flow, SLC_tab_high, rbs]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef get_GAMMA_RASTER(mode, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Script to 
determine the default extension for raster images or the operating system type\n    | Copyright 2019 Gamma Remote Sensing, v1.3 1-Apr-2019 clw/uw/cm\n    \n    Parameters\n    ----------\n    mode:\n        Specify the script string output:\n            * 0: raster file extension (ras, bmp, or tif)\n            * 1: OS type: Linux, MINGW64_NT-10.0, CYGWIN_NT-10.0, darwin...\n            * NOTE: The default raster format on Linux systems is SUN_RASTER (\\\\*.ras), for all other operating systems it is BMP (\\\\*.bmp).\n              SUN_RASTER and BMP images are limited in size to 32767 x 32767. TIFF files do not have this limitation.\n              To set the default image raster format for Gamma programs, set the environment variable GAMMA_RASTER as follows:\n              bash:\n              export GAMMA_RASTER=SUN_RASTER  #extension: ras\n              export GAMMA_RASTER=BMP         #extension: bmp\n              export GAMMA_RASTER=TIFF        #extension: tif\n              csh,tcsh:\n              setenv GAMMA_RASTER SUN_RASTER  #extension: ras\n              setenv GAMMA_RASTER BMP         #extension: bmp\n              setenv GAMMA_RASTER TIFF        #extension: tif\n              Environment variables can be set either in processing scripts, or in the shell initialization file (e.g. 
.bashrc)\n              Programs in the Gamma software that generate raster image files query the value of GAMMA_RASTER if it has been defined.\n              This script can be called from within another script to determine the default raster image format or OS type:\n              bash:        $ext=`get_GAMMA_RASTER 0`\n              csh,tcsh: set ext=`get_GAMMA_RASTER 0`\n              The variable $ext can then be used to specify the format of the output raster file by using it to construct\n              the output file name: \n              bash:        $my_raster=$my_name\".\"$ext\n              csh/tcsh: set my_raster=$my_name\".\"$ext\n              OS: Linux\n              GAMMA_RASTER value: TIFF\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/get_GAMMA_RASTER', mode]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef INTF_SLC(pass1, pass2, rlks, azlks, algorithm='-', cc_win='-', r_pos='-', az_pos='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | INTF_SLC: calculate interferogram, co-registered SLC, intensity images, and correlation\n    | Copyright 2023 Gamma Remote Sensing, v1.2 18-Apr-2023 clw/uw/cm\n    \n    Parameters\n    ----------\n    pass1:\n        pass 1 identifier (example: pass number) reference\n    pass2:\n        pass 2 identifier (example: pass number)\n    rlks:\n        number of range looks\n    azlks:\n        number of azimuth looks\n    algorithm:\n        algorithm used to determine offsets (enter - for default)\n            * 1: intensity image cross correlation (default)\n            * 2: fringe visibility\n    \n    cc_win:\n        window used for estimation of the correlation coefficient (enter - for default: 3)\n    
r_pos:\n        range position of center of image patch for initial offset (enter - for default: image center)\n    az_pos:\n        azimuth position of center of image patch for initial offset (enter - for default: image center)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/INTF_SLC', pass1, pass2, rlks, azlks, algorithm, cc_win, r_pos, az_pos]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ionosphere_check(SLC, par, rwin='-', azwin='-', thresh='-', rstep='-', azstep='-', cleaning='-', use_existing='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | ionosphere_check: Determine azimuth spectrum sub-band range and azimuth offsets of a single SLC\n    | Significant non-zero azimuth offsets are a clear indication for the presence of ionospheric effects\n    | Copyright 2024 Gamma Remote Sensing, v1.8 11-Dec-2024 uw/cm\n    \n    Parameters\n    ----------\n    SLC:\n        (input) SLC image (e.g. 20070214.slc)\n    par:\n        (input) SLC parameter file (e.g. 
20070214.slc.par)\n    rwin:\n        range window size used in offset estimation (enter - for default: 256)\n    azwin:\n        azimuth window size used in offset estimation (enter - for default: 256)\n    thresh:\n        threshold value used in offset estimation (enter - for default: 0.1)\n    rstep:\n        range step used in offset estimation (enter - for default: rwin/4)\n    azstep:\n        azimuth step used in offset estimation (enter - for default: azwin/4)\n    cleaning:\n        cleaning flag (enter - for default)\n            * 0: no cleaning, keep intermediate files\n            * 1: delete intermediate files (default)\n    \n    use_existing:\n        use files generated in a previous run to speed up processing (enter - for default)\n            * 0: no (default)\n            * 1: yes\n              \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/ionosphere_check', SLC, par, rwin, azwin, thresh, rstep, azstep, cleaning, use_existing]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef make_tab(list, tab, template, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | GAMMA_SOFTWARE-20250625/ISP/scripts/make_tab\n    | Generate a table file from a list or multi-column table using a text template\n    | Copyright 2024, Gamma Remote Sensing, v1.1 22-Apr-2024 cm/clw/uw\n    \n    Parameters\n    ----------\n    list:\n        (input) list or multi-column table (text)\n    tab:\n        (output) table file (text)\n    template:\n        template definition used to generate a line of the output table, entered between single quotes.\n            Placeholders $1, $2, ... 
specify the columns of the input table.\n            (example 1: '$1.slc $1.slc.par')\n            (example 2: '$1_$2.base $1_$2.off')\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/make_tab', list, tab, template]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef mk_ptarg(RSLC_tab, cal_dir, r_samp, az_samp, osf='-', options='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg\n    | Copyright 2023, Gamma Remote Sensing, v1.6 18-Apr-2023 clw\n    | Perform point target analysis on a stack of coregistered SLCs\n    \n    Parameters\n    ----------\n    RSLC_tab:\n        (input) two column list of coregistered SLC filenames and SLC parameter filenames (including paths) (ascii)\n            1. SLC filename  (includes path)\n            2. 
SLC parameter filename (includes path)\n    cal_dir:\n        directory for output calibration results\n    r_samp:\n        (input) calibration target range sample number\n    az_samp:\n        (input) calibration target azimuth line number\n    osf:\n        SLC over-sampling factor 2, 4, 8, 16, 32, 64 (enter - for default: 16)\n            -s scale  (option) set image display scale factor (default: 0.3)\n            -e exp    (option) set image display exponent (default: 0.5)\n    options:\n        not documented\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg', RSLC_tab, cal_dir, r_samp, az_samp, osf, options]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef mk_ptarg_cal(CR_tab, SLC, SLC_par, cal_dir, sigma, c_rpos, c_azpos, osf='-', options='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg_cal\n    | Copyright 2023, Gamma Remote Sensing, v2.1 18-Apr-2023 clw\n    | Perform point target analysis and calibration factor evaluation for a set of point targets\n    \n    Parameters\n    ----------\n    CR_tab:\n        (input) 3 column list of row and sample number of corner reflectors\n            1. Corner reflector id\n            2. SLC column  (includes path)\n            3. 
SLC row    (includes path)\n    SLC:\n        SLC image\n    SLC_par:\n        SLC_parameter file\n    cal_dir:\n        directory for output calibration results\n    sigma:\n        Radar cross-section of the corner reflectors\n    c_rpos:\n        range sample number of the center of the region used to estimate region\n    c_azpos:\n        azimuth line of the center of the region used to estimate clutter\n    osf:\n        SLC over-sampling factor 2, 4, 8, 16, 32, 64 (enter - for default: 16)\n            -s scale  (option) set image display scale factor (default: 0.2)\n            -e exp    (option) set image display exponent (default: 0.5)\n    options:\n        not documented\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/mk_ptarg_cal', CR_tab, SLC, SLC_par, cal_dir, sigma, c_rpos, c_azpos, osf, options]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef mk_tab3(dir, ext1, ext2, ext3, tab, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Copyright 2023, Gamma Remote Sensing, v1.1 24-Apr-2023 clw\n    | Generate SLC_tab, MLI_tab, or RAW_list for processing\n    \n    Parameters\n    ----------\n    dir:\n        (input) directory including paths that contain the data files\n    ext1:\n        (input) pattern to select data files (examples: slc, raw...), (enter - for all files in the directory)\n    ext2:\n        (input) pattern to select parameter files that match the data (enter - for none, examples: slc.par, raw_par, raw.par)\n    ext3:\n        (input) pattern to select parameter files that match the data (enter - for none, examples: ppar)\n    tab:\n        (output) list of data filenames and associated parameter files (including paths) (text)\n           
 * NOTE: The current directory is denoted using . \n              \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/mk_tab3', dir, ext1, ext2, ext3, tab]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_plot_az(offset, r_min, r_max, r_plot, az_plot, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | IPTA script: GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_az\n    | Copyright 2023, Gamma Remote Sensing, v1.4 17-Apr-2023 clw\n    | extract range and azimuth offsets for a range window from a text offset file\n    \n    Parameters\n    ----------\n    offset:\n        (input) list of range and azimuth offsets generated by offset_pwr (text)\n    r_min:\n        minimum range pixel number to extract range and azimuth offsets\n    r_max:\n        maximum range pixel number to extract range and azimuth offsets\n    r_plot:\n        range offsets xmgrace plot file\n    az_plot:\n        azimuth offsets xmgrace plot file\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_az', offset, r_min, r_max, r_plot, az_plot]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef offset_plot_r(offset, az_min, az_max, r_plot, az_plot, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | IPTA script: GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_r\n    | Copyright 2004, Gamma Remote Sensing, v1.3 17-Jan-2005 clw\n    | extract range and azimuth offsets for an azimuth window 
from a text offset file\n    \n    Parameters\n    ----------\n    offset:\n        (input) list of range and azimuth offsets generated by offset_pwr (text)\n    az_min:\n        minimum azimuth line number to extract range and azimuth offsets\n    az_max:\n        maximum azimuth line number to extract range and azimuth offsets\n    r_plot:\n        range offsets xmgrace plot file\n    az_plot:\n        azimuth offsets xmgrace plot file       \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/offset_plot_r', offset, az_min, az_max, r_plot, az_plot]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef OPOD_vec(SLC_par, OPOD_dir, nstate='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | GAMMA_SOFTWARE-20250625/ISP/scripts/OPOD_vec\n    | Copyright 2025, Gamma Remote Sensing, v1.7 4-Feb-2025 clw/awi/cm\n    | Extract Sentinel-1 state vectors from an OPOD file and write these state vectors to an SLC parameter file\n    \n    Parameters\n    ----------\n    SLC_par:\n        (input/output) ISP SLC/MLI image parameter file\n    OPOD_dir:\n        (input) directory containing Sentinel-1 precise or restituted OPOD orbit data files (AUX_POEORB or AUX_RESORB)\n            orbit files can be downloaded from https://s1qc.asf.alaska.edu/ or https://dataspace.copernicus.eu/\n    nstate:\n        number of state vectors to extract (enter - for default: include 60 sec extension at the start and end of the SLC data)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = 
['GAMMA_SOFTWARE-20250625/ISP/scripts/OPOD_vec', SLC_par, OPOD_dir, nstate]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef run_all(list, command, log='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | GAMMA_SOFTWARE-20250625/ISP/scripts/run_all\n    | Run a single command iterating over arguments constructed from the elements of a list or multi-column table\n    | Copyright 2025, Gamma Remote Sensing, v1.7 11-Mar-2025 clw/cm\n    \n    Parameters\n    ----------\n    list:\n        (input) list or multi-column table (text)\n    command:\n        command template, entered between single quotes. Command arguments are constructed \n            with placeholders $1, $2, ... that specify the columns of the input table.\n            (example 1: 'multi_look $1.slc $1.slc.par $1.mli $1.mli.par 5 1')\n            (example 2: 'cp -r $1 $2')\n    log:\n        (output) log file that captures all screen output (both stdout and stderr) (enter - for none)\n            Example: run_all dates 'multi_look $1.slc $1.slc.par $1.mli $1.mli.par 5 1' log\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/run_all', list, command, log]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_BURST_tab(SLC1_tab, SLC2_tab, BURST_tab, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | GAMMA_SOFTWARE-20250625/ISP/scripts/S1_BURST_tab\n    | Copyright 2023, Gamma Remote Sensing, v1.5 18-Apr-2023 clw/cm\n    | Calculate Sentinel BURST_tab based on parameters extracted from SLC parameter files listed in SLC1_tab and SLC2_tab\n    | Running SLC_copy_ScanSAR using BURST_tab will generate SLC2 data with matching bursts for each swath of SLC1 and SLC2\n 
   \n    Parameters\n    ----------\n    SLC1_tab:\n        (input) 3 column list of the reference TOPS SLC swaths in row order IW1, IW2, IW3\n    SLC2_tab:\n        (input) 3 column list of TOPS SLC2 swaths to be resampled to the geometry of the reference SLC1 in row order IW1, IW2, IW3.\n    BURST_tab:\n        (output) 2 column list of the first and last bursts to copy from each swath, one line for each swath\n            BURST_tab line entries: first_burst  last_burst    Note: first burst is 1\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_BURST_tab', SLC1_tab, SLC2_tab, BURST_tab]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_BURST_tab_from_zipfile(zipfile_list, zipfile_ref, burst_number_table_ref='-', cleaning='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | S1_BURST_tab_from_zipfile: Script used to generate S1_BURST_tab to support burst selection\n    | Copyright 2021 Gamma Remote Sensing, v1.8 26-Jan-2021 uw/cm\n    | \n    | NOTE: S1_BURST_tab_from_zipfile now calls S1_BURST_tab_from_zipfile.py\n    | Using directly S1_BURST_tab_from_zipfile.py gives access to\n    | additional useful options and is therefore recommended.\n    | \n    \n    Parameters\n    ----------\n    zipfile_list:\n        (input) ASCII file containing S1 zip filename(s) of one data take\n            indicate - to generate burst_number_table of reference TOPS SLC\n    zipfile_ref:\n        (input) S1 zip filename for the reference TOPS SLC\n    burst_number_table_ref:\n        (input) ASCII file containing first/last burst numbers selected\n            indicate - to use all bursts as present in the reference TOPS SLC zipfile\n    cleaning:\n        flag to indicate if 
intermediate files are deleted (default=1: yes, 0: not deleted)\n            intermediate and output filenames are generated based on the zip file names\n            \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_BURST_tab_from_zipfile', zipfile_list, zipfile_ref, burst_number_table_ref, cleaning]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_extract_png(zipfile, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | S1_extract_png: Script used to extract (and rename) quicklook (png file) from a S1 ZIP file\n    | Copyright 2019 Gamma Remote Sensing, v1.1 22-Mar-2019 uw/cm\n    \n    Parameters\n    ----------\n    zipfile:\n        (input) Sentinel-1 zipfile (GRD or SLC)\n            \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_extract_png', zipfile]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_GRD_preproc(S1_list, MLI_dir, pol, log, options='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Preprocessing of Sentinel-1 TOPS GRD products, extract GRD data and generate MLI products\n    | Copyright 2023, Gamma Remote Sensing, v1.3 18-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    S1_list:\n        (input) single column text file. 
Entries are directories (including path) containing Sentinel-1 TOPS GRD products\n    MLI_dir:\n        directory for output SLC data files and SLC parameter files\n            * NOTE: output file names have the form : 20150119_hh.mli\n    \n    pol:\n        SLC polarization to extract (hh,hv,vh,vv)\n    log:\n        (output) S1 GRD pre-processing log file\n            -c       (option) apply radiometric calibration factor without noise subtraction\n            -n       (option) apply radiometric calibration factor with noise subtraction\n            -t       (option) include full timestamp YYYYMMDDtHHMMSS in SLC and SLC_par filenames, default YYYYMMDD\n    options:\n        not documented\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_GRD_preproc', S1_list, MLI_dir, pol, log, options]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_path_number(S1_zipfile, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | S1_path_number: Script to determine S1 path (or track) number\n    | Copyright 2025 Gamma Remote Sensing, v1.3 3-Feb-2025 uw/cm/oc\n    \n    Parameters\n    ----------\n    S1_zipfile:\n        (input) S1 zip filename for the TOPS SLC\n            \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_path_number', S1_zipfile]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef S1_TOPS_preproc(S1_list, SLC_dir, pol, log, options='-', logpath=None, outdir=None, 
shellscript=None):\n    \"\"\"\n    | Preprocessing of Sentinel-1 TOPS SLC products, extract SLC data and generate SLC_tab\n    | Copyright 2023, Gamma Remote Sensing, v2.8 18-Apr-2023 clw/awi/cm\n    \n    Parameters\n    ----------\n    S1_list:\n        (input) single column text file. Entries are directories (including path) containing Sentinel-1 TOPS SLC products\n    SLC_dir:\n        directory for output SLC data files and SLC parameter files\n            Note: output file names have the form : 20150119_iw1_hh.slc\n    pol:\n        SLC polarization to extract (hh,hv,vh,vv)\n    log:\n        (output) S1 SLC pre-processing log file\n            -c          (option) apply radiometric calibration factor without noise subtraction\n            -n          (option) apply radiometric calibration factor with noise subtraction\n            -s          (option) output is SCOMPLEX format (default: FCOMPLEX)\n            -t          (option) include full timestamp YYYYMMDDtHHMMSS in SLC and SLC_par filenames, default YYYYMMDD\n            -m MLI_dir  (option) calculate MLI images and store in MLI_dir, enter . 
for current directory\n            -r rlks     (option) number of MLI range looks (default: 10)\n            -a azlks    (option) number of MLI azimuth looks (default: 2)\n            -b SLC_tab  (option) SLC_tab filename, by default SLC_tab_YYMMDD or SLC_tab_YYYYMMDDtHHMMSS\n    options:\n        not documented\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/S1_TOPS_preproc', S1_list, SLC_dir, pol, log, options]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SBI_INT(RSLC1, RSLC1_par, RSLC2, RSLC2_par, sbi, off, sbi_pwr, par_out, norm_sq='-', rlks='-', azlks='-', iwflg='-', cflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | SBI_INT: Script to generate azimuth Split-Beam Interferogram from a coregistered interferometric SLC pair\n    | Copyright 2023 Gamma Remote Sensing, v1.4 19-Apr-2023 uw/clw/cm\n    \n    Parameters\n    ----------\n    RSLC1:\n        (input) master single-look complex image (FCOMPLEX or SCOMPLEX)\n    RSLC1_par:\n        (input) SLC ISP image parameter file of RSLC1\n    RSLC2:\n        (input) co-registered slave SLC image (FCOMPLEX or SCOMPLEX)\n    RSLC2_par:\n        (input) SLC ISP image parameter file of RSLC2\n    sbi:\n        (output) multi-look split-beam interferogram (FCOMPLEX)\n    off:\n        (output) ISP offset parameter file for multi-look split-beam interferogram (ascii)\n    sbi_pwr:\n        (output) multi-look reference backscatter intensity image (FLOAT)\n    par_out:\n        (output) SLC/MLI ISP image parameter file of sbi_pwr\n    norm_sq:\n        normalized squint difference parameter (enter - for default: 0.5)\n    rlks:\n        number of range looks in output split-beam interferogram (enter - for 
default: 1)\n    azlks:\n        number of azimuth looks in output split-beam interferogram (enter - for default: 1)\n    iwflg:\n        inverse weighting flag (enter - for default)\n            * 0: do not remove azimuth processing spectral window (default)\n            * 1: apply inverse of azimuth compression processing window\n    \n    cflg:\n        flag to indicate if intermediate data (e.g. filtered slc) are deleted (enter - for default)\n            * 0: intermediate data are deleted (default)\n            * 1: intermediate data are NOT deleted\n              file names for band-pass filtered SLC are generated automatically\n              by adding the letter  b / f  for the backward / forward looking beam\n              \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/SBI_INT', RSLC1, RSLC1_par, RSLC2, RSLC2_par, sbi, off, sbi_pwr, par_out, norm_sq, rlks, azlks, iwflg, cflg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_burst_cc_ad(DIFF_tab, MLI1_tab, MLI2R_tab, slope_tab, texture_tab, CC_tab, log, box_min='-', box_max='-', wgt_flag='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Estimate interferometric coherence for ScanSAR burst data using cc_ad\n    | Copyright 2023, Gamma Remote Sensing, v1.2 17-Apr-2023 cm\n    \n    Parameters\n    ----------\n    DIFF_tab:\n        (input) 3 column list of the DIFF swaths listed in order from near to far range \n            DIFF_tab line entries:  DIFF  MLI_par  TOPS_par\n    MLI1_tab:\n        (input) 3 column list of the reference ScanSAR MLI swaths listed in order from near to far range (enter - for none)\n            MLI1_tab line entries:   MLI  MLI_par  TOPS_par\n    MLI2R_tab:\n        
(input) 3 column list of ScanSAR MLI swaths listed in order from near to far range, coregistered with MLI1 (enter - for none)\n            MLI2R_tab line entries:  MLI  MLI_par  TOPS_par\n    slope_tab:\n        (input) 1 column list of ScanSAR phase slope swaths listed in order from near to far range (enter - for none)\n    texture_tab:\n        (input) 1 column list of ScanSAR backscatter texture swaths listed in order from near to far range (enter - for none)\n    CC_tab:\n        (input/output) 3 column list of the CC swaths listed in order from near to far range\n            CC_tab line entries:      CC  MLI_par  TOPS_par\n            \n            * NOTE: if CC_tab does not exist, it will be created in the current directory.\n              The binary file will be named from the differential interferogram name, with the addition of a \".cc\" extension.\n              The MLI_par and TOPS_par files are copied from MLI1_tab if available, from DIFF_tab otherwise.\n    log:\n        (output) processing log file\n    box_min:\n        smallest correlation average box size (enter - for default: 3.0)\n    box_max:\n        largest correlation average box size  (enter - for default: 9.0)\n    wgt_flag:\n        weighting function (enter - for default)\n            * 0: constant (default)\n            * 1: gaussian\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/ScanSAR_burst_cc_ad', DIFF_tab, MLI1_tab, MLI2R_tab, slope_tab, texture_tab, CC_tab, log, box_min, box_max, wgt_flag]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef ScanSAR_burst_cc_wave(DIFF_tab, MLI1_tab, MLI2R_tab, CC_tab, log, bx='-', by='-', wflg='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    
| Estimate interferometric coherence for ScanSAR burst data using cc_wave\n    | Copyright 2019, Gamma Remote Sensing, v1.2 24-Apr-2019 cm\n    \n    Parameters\n    ----------\n    DIFF_tab:\n        (input) 3 column list of the DIFF swaths listed in order from near to far range \n            DIFF_tab line entries:  DIFF  MLI_par  TOPS_par\n    MLI1_tab:\n        (input) 3 column list of the reference ScanSAR MLI swaths listed in order from near to far range (enter - for none)\n            MLI1_tab line entries:   MLI  MLI_par  TOPS_par\n    MLI2R_tab:\n        (input) 3 column list of ScanSAR MLI swaths listed in order from near to far range, coregistered with MLI1 (enter - for none)\n            MLI2R_tab line entries:  MLI  MLI_par  TOPS_par\n    CC_tab:\n        (input/output) 3 column list of the CC swaths listed in order from near to far range\n            CC_tab line entries:      CC  MLI_par  TOPS_par\n            \n            * NOTE: if CC_tab does not exist, it will be created in the current directory.\n              The binary file will be named from the differential interferogram name, with the addition of a \".cc\" extension.\n              The MLI_par and TOPS_par files are copied from MLI1_tab if available, from DIFF_tab otherwise.\n    log:\n        (output) processing log file\n    bx:\n        estimation window size in columns (enter - for default: 5.0)\n    by:\n        estimation window size in lines (enter - for default: 5.0)\n    wflg:\n        estimation window (enter - for default):\n            * 0: rectangular (default)\n            * 1: triangular\n            * 2: Gaussian\n            * 3: normalized vector sum with rectangular window\n            * NOTE: This estimator does not use the MLI data, even when specified\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma 
commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/ScanSAR_burst_cc_wave', DIFF_tab, MLI1_tab, MLI2R_tab, CC_tab, log, bx, by, wflg]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef SLC_copy_WB(SLC_tab, SLC2_dir, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | GAMMA_SOFTWARE-20250625/ISP/scripts/SLC_copy_WB\n    | Copyright 2011, Gamma Remote Sensing, v1.1 9-Apr-2011 clw\n    | Create a new set of SLCs for all beams in a PALSAR WB ScanSAR image\n    \n    Parameters\n    ----------\n    SLC_tab:\n        (input) two column list of input SLC files and SLC ISP image parameter files (including paths) (text)\n    SLC2_dir:\n        directory to contain copied segments of the input SLC data and the associated parameter files\n            * NOTE: current directory is denoted using .\n    \n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/SLC_copy_WB', SLC_tab, SLC2_dir]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef TX_SLC_preproc(TSX_list, SLC_dir, log, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | Preprocessing of TerraSAR-X TDX1 and TSX1 SLC products using par_TX_SLC\n    | Copyright 2023, Gamma Remote Sensing, v1.3 17-Apr-2023 clw\n    \n    Parameters\n    ----------\n    TSX_list:\n        (input) single column text file with directories (including path)\n            containing path to directory containing product XML for IMAGEDATA/\\\\*.cos files\n    SLC_dir:\n        directory for output SLC data files and SLC parameter files\n    log:\n        (output) processing log file\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or 
None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/TX_SLC_preproc', TSX_list, SLC_dir, log]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef unw_correction_filt(unw_in, unw_out, width, fsize='-', thresh1='-', thresh2='-', iterations='-', cleaning='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | unw_correction_filt: Phase unwrapping ambiguity error correction relative to spatially filtered phase\n    | Copyright 2023 Gamma Remote Sensing, v1.5 18-Apr-2023 uw/cm\n    \n    Parameters\n    ----------\n    unw_in:\n        (input) unwrapped phase file to correct (float)\n    unw_out:\n        (output) corrected  unwrapped phase file (float)\n    width:\n        number of range samples per line\n    fsize:\n        maximum filter radius in pixels (enter - for default: 5)\n    thresh1:\n        upper threshold for negative phase differences (enter - for default: -3.0)\n    thresh2:\n        lower threshold for positive phase differences (enter - for default: 3.0)\n    iterations:\n        number of iterations to run (enter - for default: 1)\n    cleaning:\n        cleaning flag indicating if intermediary files are deleted (enter - for default)\n            * 0: no\n            * 1: yes (default)\n              The difference between the unfiltered and spatially filtered phase (using fspf) is used\n              to determine and correct phase unwrapping ambiguity errors\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/unw_correction_filt', unw_in, unw_out, width, fsize, thresh1, thresh2, iterations, cleaning]\n    
process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef unw_correction_poly(unw_in, unw_out, width, poly, flag, max_iter='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | unw_correction_poly: Phase unwrapping ambiguity error correction for polygon areas\n    | Copyright 2023 Gamma Remote Sensing, v1.5 18-Apr-2023 uw/cm\n    \n    Parameters\n    ----------\n    unw_in:\n        (input) unwrapped phase file to correct (FLOAT)\n    unw_out:\n        (output) corrected  unwrapped phase file (FLOAT)\n    width:\n        number of range samples per line\n    poly:\n        (input) polygon file (text)\n    flag:\n        ambiguity correction flag (1: add 2PI; -1: subtract 2PI)\n    max_iter:\n        maximum number of iterations done (enter - for default: 1)\n            (iterations are used (a) if the ambiguity to correct is not 2PI but a\n            multiple of 2PI and (b) if the ambiguity error is in an area with a\n            significant phase slope)\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/unw_correction_poly', unw_in, unw_out, width, poly, flag, max_iter]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef UNWRAP(interf, cc, pwr, unwrap, flag, width, lines, corr_thr='-', pwr_thr='-', r_init='-', az_init='-', r1='-', r2='-', l1='-', l2='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | UNWRAP: unwrap phase\n    | Copyright 2023 Gamma Remote Sensing, v1.4 19-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    interf:\n        interferogram filename  (\\\\*.int, \\\\*.flt)\n    cc:\n        correlation filename (\\\\*.cc)\n    pwr:\n        intensity image (\\\\*.pwr, \\\\*.mli)\n    unwrap:\n        unwrap 
output file (\\\\*.unw)\n    flag:\n        unwrapping flag file (\\\\*.flag)\n    width:\n        interferogram width\n    lines:\n        number of interferogram lines\n    corr_thr:\n        threshold for correlation in the unwrapping mask (enter - for default: 0.7)\n    pwr_thr:\n        intensity threshold for phase unwrapping neutrons, multiples of average (enter - for default: 6.0)\n    r_init:\n        range seed location in the interferogram (enter - for default: width/2)\n    az_init:\n        azimuth seed location in the interferogram (enter - for default: nlines/2)\n    r1:\n        starting range sample offset to unwrap (enter - for default: 0)\n    r2:\n        ending range sample offset to unwrap (enter - for default: width-1)\n    l1:\n        starting line offset to unwrap (enter - for default: 0)\n    l2:\n        ending line offset to unwrap (enter - for default: nlines-1)\\n\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/UNWRAP', interf, cc, pwr, unwrap, flag, width, lines, corr_thr, pwr_thr, r_init, az_init, r1, r2, l1, l2]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\ndef UNWRAP_PAR(interf_par, interf, cc, pwr, unwrap, flag, corr_thr='-', pwr_thr='-', r_init='-', az_init='-', r1='-', r2='-', l1='-', l2='-', logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    | UNWRAP_PAR: unwrap phase using parameters from the ISP interferogram parameter file\n    | Copyright 2023 Gamma Remote Sensing, v1.3 19-Apr-2023 clw/cm\n    \n    Parameters\n    ----------\n    interf_par:\n        interferogram parameter file \\\\*.off\n    interf:\n        interferogram filename  (\\\\*.int, \\\\*.flt)\n    cc:\n        correlation filename (\\\\*.cc)\n    pwr:\n        
intensity image (\\\\*.pwr, \\\\*.mli)\n    unwrap:\n        unwrap output file (\\\\*.unw)\n    flag:\n        unwrapping flag file (\\\\*.flag)\n    corr_thr:\n        threshold for correlation in the unwrapping mask (enter - for default: 0.7)\n    pwr_thr:\n        intensity threshold for phase unwrapping neutrons, multiples of average (enter - for default: 6.0)\n    r_init:\n        range seed location in the interferogram (enter - for default: width/2)\n    az_init:\n        azimuth seed location in the interferogram (enter - for default: nlines/2)\n    r1:\n        starting range sample offset to unwrap (enter - for default: 0)\n    r2:\n        ending range sample offset to unwrap (enter - for default: width-1)\n    l1:\n        starting line offset to unwrap (enter - for default: 0)\n    l2:\n        ending line offset to unwrap (enter - for default: nlines-1)\\n\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n    \"\"\"\n    cmd = ['GAMMA_SOFTWARE-20250625/ISP/scripts/UNWRAP_PAR', interf_par, interf, cc, pwr, unwrap, flag, corr_thr, pwr_thr, r_init, az_init, r1, r2, l1, l2]\n    process(cmd, logpath=logpath, outdir=outdir, shellscript=shellscript)\n\n\n"
  },
  {
    "path": "pyroSAR/gamma/util.py",
    "content": "###############################################################################\n# universal core routines for processing SAR images with GAMMA\n\n# Copyright (c) 2014-2026, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\n\"\"\"\nThis module is intended as a set of generalized processing routines for modularized GAMMA work flows.\nThe function parametrization is intended to be applicable to any kind of situation and input data set.\nThus, instead of choosing a specific parametrization for the data at hand,\ncore parameters are iterated over a set of values in order to find the one best suited for the task.\nThe approach of the single routines is likely to still have drawbacks and might fail in certain situations.\nTesting and suggestions on improvements are very welcome.\n\"\"\"\nimport os\nimport re\nimport shutil\nimport zipfile as zf\nfrom datetime import datetime\nfrom urllib.error import URLError\nimport numpy as np\nfrom spatialist import haversine, Raster\nfrom spatialist.ancillary import union, finder\n\nfrom ..S1 import OSV\nfrom ..drivers import ID, identify_many\nfrom . 
import ISPPar, Namespace, par2hdr\nfrom ..ancillary import multilook_factors, hasarg, groupby, Lock\nfrom pyroSAR.examine import ExamineSnap, ExamineGamma\nfrom .auxil import do_execute\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\ntry:\n    from .api import diff, disp, isp, lat\nexcept ImportError:\n    pass\n\n\ndef calibrate(id, directory, return_fnames=False,\n              logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    radiometric calibration of SAR scenes\n    \n    Parameters\n    ----------\n    id: ~pyroSAR.drivers.ID\n        an SAR scene object of type pyroSAR.ID or any subclass\n    directory: str\n        the directory to search for GAMMA calibration candidates\n    return_fnames: bool\n        return the names of the output image files? Default: False.\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the GAMMA commands to in shell format\n\n    Returns\n    -------\n    List[str] or None\n    \"\"\"\n    cname = type(id).__name__\n    new = []\n    if cname == 'CEOS_PSR':\n        for image in id.getGammaImages(directory):\n            if image.endswith('_slc'):\n                isp.radcal_SLC(SLC=image,\n                               SLC_par=image + '.par',\n                               CSLC=image + '_cal',\n                               CSLC_par=image + '_cal.par',\n                               K_dB=id.meta['k_dB'],\n                               logpath=logpath,\n                               outdir=outdir,\n                               shellscript=shellscript)\n                par2hdr(image + '_cal.par', image + '_cal.hdr')\n                new.append(image + '_cal')\n    \n    elif cname == 'EORC_PSR':\n        for image in id.getGammaImages(directory):\n            if image.endswith('_mli'):\n                isp.radcal_MLI(MLI=image,\n                     
          MLI_par=image + '.par',\n                               OFF_par='-',\n                               CMLI=image + '_cal',\n                               antenna='-',\n                               rloss_flag=0,\n                               ant_flag=0,\n                               refarea_flag=1,\n                               sc_dB=0,\n                               K_dB=id.meta['k_dB'],\n                               pix_area=image + '_cal_pix_ell',\n                               logpath=logpath,\n                               outdir=outdir,\n                               shellscript=shellscript)\n                par2hdr(image + '.par', image + '_cal.hdr')\n                par2hdr(image + '.par', image + '_cal_pix_ell' + '.hdr')\n                # rename parameter file \n                os.rename(image + '.par', image + '_cal.par')\n                new.append(image + '_cal')\n    \n    elif cname == 'ESA':\n        k_db = {'ASAR': 55., 'ERS1': 58.24, 'ERS2': 59.75}[id.sensor]\n        inc_ref = 90. 
if id.sensor == 'ASAR' else 23.\n        imgs = id.getGammaImages(directory)\n        candidates = [x for x in imgs if re.search('_pri$', x)]\n        for image in candidates:\n            out = image.replace('pri', 'grd')\n            isp.radcal_PRI(PRI=image,\n                           PRI_par=image + '.par',\n                           GRD=out,\n                           GRD_par=out + '.par',\n                           K_dB=k_db,\n                           inc_ref=inc_ref,\n                           logpath=logpath,\n                           outdir=outdir,\n                           shellscript=shellscript)\n            par2hdr(out + '.par', out + '.hdr')\n            new.append(out)\n    \n    elif cname == 'SAFE':\n        log.info('calibration already performed during import')\n    \n    else:\n        msg = f'calibration for class {cname} is not implemented yet'\n        raise NotImplementedError(msg)\n    \n    if return_fnames and len(new) > 0:\n        return new\n\n\ndef convert2gamma(id, directory, S1_tnr=True, S1_bnr=True,\n                  basename_extensions=None, exist_ok=False,\n                  return_fnames=False,\n                  logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    general function for converting SAR images to GAMMA format\n\n    Parameters\n    ----------\n    id: ~pyroSAR.drivers.ID\n        an SAR scene object of type pyroSAR.ID or any subclass\n    directory: str\n        the output directory for the converted images\n    S1_tnr: bool\n        only Sentinel-1: should thermal noise removal be applied to the image?\n    S1_bnr: bool\n        only Sentinel-1 GRD: should border noise removal be applied to the image?\n        This is available since version 20191203, for older versions this argument is ignored.\n    basename_extensions: list[str] or None\n        names of additional parameters to append to the basename, e.g. 
['orbitNumber_rel']\n    exist_ok: bool\n        allow existing output files and do not create new ones?\n    return_fnames: bool\n        return the names of the output image files? Default: False.\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the GAMMA commands to in bash format\n\n    Returns\n    -------\n    list[str] or None\n        the sorted image file names if ``return_fnames=True`` and None otherwise\n    \"\"\"\n    \n    if not isinstance(id, ID):\n        raise IOError('id must be of type pyroSAR.ID')\n    \n    if id.compression is not None:\n        raise RuntimeError('scene is not yet unpacked')\n    \n    os.makedirs(directory, exist_ok=True)\n    \n    fnames = []\n    \n    cname = type(id).__name__\n    \n    if cname == 'CEOS_ERS':\n        if id.sensor in ['ERS1', 'ERS2']:\n            if id.product == 'SLC' \\\n                    and id.meta['proc_system'] in ['PGS-ERS', 'VMP-ERS', 'SPF-ERS']:\n                outname_base = id.outname_base(extensions=basename_extensions)\n                outname_base = '{}_{}_{}'.format(outname_base,\n                                                 id.polarizations[0],\n                                                 id.product.lower())\n                outname = os.path.join(directory, outname_base)\n                if not os.path.isfile(outname):\n                    lea = id.findfiles('LEA_01.001')[0]\n                    dat = id.findfiles('DAT_01.001')[0]\n                    title = re.sub(r'\\.PS$', '', os.path.basename(id.file))\n                    \n                    pars = {'CEOS_SAR_leader': lea,\n                            'SLC_par': outname + '.par',\n                            'CEOS_DAT': dat,\n                            'SLC': outname,\n                            'inlist': [title],\n                            'logpath': 
logpath,\n                            'outdir': outdir,\n                            'shellscript': shellscript}\n                    \n                    with Lock(outname):\n                        if do_execute(pars, ['SLC', 'SLC_par'], exist_ok):\n                            isp.par_ESA_ERS(**pars)\n                            par2hdr(outname + '.par', outname + '.hdr')\n                    fnames.append(outname)\n                else:\n                    log.info('scene already converted')\n            else:\n                raise NotImplementedError('ERS {} product of {} processor in CEOS format not implemented yet'\n                                          .format(id.product, id.meta['proc_system']))\n        else:\n            raise NotImplementedError('sensor {} in CEOS format not implemented yet'.format(id.sensor))\n    \n    elif cname == 'CEOS_PSR':\n        images = id.findfiles('^IMG-')\n        if id.product == '1.0':\n            raise RuntimeError('PALSAR level 1.0 products are not supported')\n        for image in images:\n            polarization = re.search('[HV]{2}', os.path.basename(image)).group(0)\n            outname_base = id.outname_base(extensions=basename_extensions)\n            \n            pars = {'CEOS_leader': id.file,\n                    'CEOS_data': image,\n                    'logpath': logpath,\n                    'outdir': outdir,\n                    'shellscript': shellscript}\n            \n            if id.product == '1.1':\n                outname_base = '{}_{}_slc'.format(outname_base, polarization)\n                outname = os.path.join(directory, outname_base)\n                \n                pars['SLC'] = outname\n                pars['SLC_par'] = outname + '.par'\n                \n                with Lock(outname):\n                    if do_execute(pars, ['SLC', 'SLC_par'], exist_ok):\n                        isp.par_EORC_PALSAR(**pars)\n                        par2hdr(outname + '.par', outname + 
'.hdr')\n            else:\n                outname_base = '{}_{}_mli_geo'.format(outname_base, polarization)\n                outname = os.path.join(directory, outname_base)\n                \n                pars['MLI'] = outname\n                pars['MLI_par'] = outname + '.par'\n                pars['DEM_par'] = outname + '_dem.par'\n                \n                with Lock(outname):\n                    if do_execute(pars, ['MLI', 'MLI_par', 'DEM_par'], exist_ok):\n                        diff.par_EORC_PALSAR_geo(**pars)\n                        par2hdr(outname + '.par', outname + '.hdr')\n            fnames.append(outname)\n    \n    elif cname == 'EORC_PSR':\n        images = id.findfiles('^sar.')\n        facter_m = id.findfiles('facter_m.dat')\n        led = id.findfiles('LED-ALOS2')\n        \n        for image in images:\n            polarization = re.search('[HV]{2}', os.path.basename(image)).group(0)\n            outname_base = id.outname_base(extensions=basename_extensions)\n            outname_base = '{}_{}'.format(outname_base, polarization)\n            outname = os.path.join(directory, outname_base) + '_mli'\n            fnames.append(outname)\n            \n            pars = {'facter_m': facter_m,\n                    'CEOS_leader': led,\n                    'SLC_par': outname + '.par',\n                    'pol': polarization,\n                    'pls_mode': 2,\n                    'KC_data': image,\n                    'pwr': outname,\n                    'logpath': logpath,\n                    'outdir': outdir,\n                    'shellscript': shellscript}\n            \n            with Lock(outname):\n                if do_execute(pars, ['pwr', 'SLC_par'], exist_ok):\n                    isp.par_KC_PALSAR_slr(**pars)\n                    par2hdr(outname + '.par', outname + '.hdr')\n    \n    elif cname == 'ESA':\n        \"\"\"\n        the command par_ASAR also accepts a K_dB argument for calibration\n        in which case the 
resulting image names will carry the suffix grd;\n        this is not implemented here but instead in function calibrate\n        \"\"\"\n        outname = os.path.join(directory, id.outname_base(extensions=basename_extensions))\n        with Lock(outname):\n            \n            isp.par_ASAR(ASAR_ERS_file=os.path.basename(id.file),\n                         output_name=outname,\n                         outdir=os.path.dirname(id.file),\n                         logpath=logpath,\n                         shellscript=shellscript)\n            \n            os.remove(outname + '.hdr')\n            for item in finder(directory, [os.path.basename(outname)], regex=True):\n                ext = '.par' if item.endswith('.par') else ''\n                outname_base = os.path.basename(item) \\\n                    .strip(ext) \\\n                    .replace('.', '_') \\\n                    .replace('PRI', 'pri') \\\n                    .replace('SLC', 'slc')\n                outname = os.path.join(directory, outname_base + ext)\n                os.rename(item, outname)\n                fnames.append(outname)\n                if outname.endswith('.par'):\n                    par2hdr(outname, outname.replace('.par', '.hdr'))\n    \n    elif cname == 'SAFE':\n        if id.product == 'OCN':\n            raise IOError('Sentinel-1 OCN products are not supported')\n        if id.meta['category'] == 'A':\n            raise IOError('Sentinel-1 annotation-only products are not supported')\n        \n        for xml_ann in finder(os.path.join(id.scene, 'annotation'), [id.pattern_ds], regex=True):\n            base = os.path.basename(xml_ann)\n            match = re.compile(id.pattern_ds).match(base)\n            \n            tiff = os.path.join(id.scene, 'measurement', base.replace('.xml', '.tiff'))\n            xml_cal = os.path.join(id.scene, 'annotation', 'calibration', 'calibration-' + base)\n            \n            product = match.group('product')\n            \n        
    # In versions released before July 2015, it was assumed that noise was already\n            # removed in GRDs and specifying the XML file meant adding it back to the data.\n            version = ExamineGamma().version\n            if version < '20150701':\n                c = (S1_tnr and product == 'slc') or (not S1_tnr and product == 'grd')\n            else:\n                c = S1_tnr\n            \n            if c:\n                xml_noise = os.path.join(id.scene, 'annotation', 'calibration', 'noise-' + base)\n            else:\n                xml_noise = '-'\n            \n            fields = (id.outname_base(extensions=basename_extensions),\n                      match.group('pol').upper(),\n                      product)\n            basename = '_'.join(fields)\n            outname = os.path.join(directory, basename)\n            \n            pars = {'GeoTIFF': tiff,\n                    'annotation_XML': xml_ann,\n                    'calibration_XML': xml_cal,\n                    'noise_XML': xml_noise,\n                    'logpath': logpath,\n                    'shellscript': shellscript,\n                    'outdir': outdir}\n            \n            if product == 'slc':\n                swath = match.group('swath').upper()\n                old = '{:_<{length}}'.format(id.acquisition_mode, length=len(swath))\n                base_new = basename.replace(old, swath)\n                outname = os.path.join(os.path.dirname(outname), base_new)\n                pars['SLC'] = outname\n                pars['SLC_par'] = outname + '.par'\n                pars['TOPS_par'] = outname + '.tops_par'\n                with Lock(outname):\n                    if do_execute(pars, ['SLC', 'SLC_par', 'TOPS_par'], exist_ok):\n                        isp.par_S1_SLC(**pars)\n                        par2hdr(outname + '.par', outname + '.hdr')\n            else:\n                if hasarg(isp.par_S1_GRD, 'edge_flag'):\n                    if S1_bnr:\n               
         pars['edge_flag'] = 2\n                    else:\n                        pars['edge_flag'] = 0\n                else:\n                    if S1_bnr:\n                        raise RuntimeError(\"The command par_S1_GRD of this GAMMA \"\n                                           \"version does not support border noise \"\n                                           \"removal. You may want to consider \"\n                                           \"pyroSAR's own method for this task.\")\n                pars['MLI'] = outname\n                pars['MLI_par'] = outname + '.par'\n                with Lock(outname):\n                    if do_execute(pars, ['MLI', 'MLI_par'], exist_ok):\n                        isp.par_S1_GRD(**pars)\n                        par2hdr(outname + '.par', outname + '.hdr')\n            fnames.append(outname)\n    \n    elif cname == 'TSX':\n        images = id.findfiles(id.pattern_ds)\n        pattern = re.compile(id.pattern_ds)\n        for image in images:\n            pol = pattern.match(os.path.basename(image)).group('pol')\n            outname_base = id.outname_base(extensions=basename_extensions)\n            outname = os.path.join(directory, outname_base + '_' + pol)\n            \n            pars = {'annotation_XML': id.file,\n                    'pol': pol,\n                    'logpath': logpath,\n                    'shellscript': shellscript,\n                    'outdir': outdir}\n            \n            if id.product == 'SSC':\n                outname += '_slc'\n                pars['COSAR'] = image\n                pars['SLC_par'] = outname + '.par'\n                pars['SLC'] = outname\n                with Lock(outname):\n                    if do_execute(pars, ['SLC', 'SLC_par'], exist_ok):\n                        isp.par_TX_SLC(**pars)\n                        par2hdr(outname + '.par', outname + '.hdr')\n            \n            elif id.product == 'MGD':\n                outname += '_mli'\n                
pars['GeoTIFF'] = image\n                pars['GRD_par'] = outname + '.par'\n                pars['GRD'] = outname\n                with Lock(outname):\n                    if do_execute(pars, ['GRD', 'GRD_par'], exist_ok):\n                        isp.par_TX_GRD(**pars)\n                        par2hdr(outname + '.par', outname + '.hdr')\n            \n            elif id.product in ['GEC', 'EEC']:\n                outname += '_mli_geo'\n                pars['GeoTIFF'] = image\n                pars['MLI_par'] = outname + '.par'\n                pars['DEM_par'] = outname + '_dem.par'\n                pars['GEO'] = outname\n                with Lock(outname):\n                    if do_execute(pars, ['GEO', 'MLI_par', 'DEM_par'], exist_ok):\n                        diff.par_TX_geo(**pars)\n                        par2hdr(outname + '.par', outname + '.hdr')\n            else:\n                raise RuntimeError('unknown product: {}'.format(id.product))\n            fnames.append(outname)\n    \n    else:\n        raise NotImplementedError('conversion for class {} is not implemented yet'.format(cname))\n    \n    if return_fnames:\n        return sorted(fnames)\n\n\ndef correctOSV(id, directory, osvdir=None, osvType='POE', timeout=20,\n               logpath=None, outdir=None, shellscript=None, url_option=1):\n    \"\"\"\n    correct GAMMA parameter files with orbit state vector information from dedicated OSV files;\n    OSV files are downloaded automatically to either the defined `osvdir` or relative to the\n    user's home directory: `~/.snap/auxdata/Orbits/Sentinel-1`.\n    \n    Parameters\n    ----------\n    id: ~pyroSAR.drivers.ID\n        the scene to be corrected\n    directory: str or None\n        a directory to be scanned for files associated with the scene, e.g. an SLC in GAMMA format.\n        If the OSV file is packed in a zip file it will be unpacked to a subdirectory `osv`.\n    osvdir: str or None\n        the directory of the OSV files. 
Default None: use the SNAP directory\n        as configured via `pyroSAR.examine.ExamineSnap` or, if SNAP is not\n        installed, `~/.snap/auxdata/Orbits/Sentinel-1` (SNAP default).\n        Subdirectories POEORB and RESORB are created automatically.\n    osvType: str or list[str]\n        the OSV type (POE|RES) to be used\n    timeout: int or tuple or None\n        the timeout in seconds for downloading OSV files as provided to :func:`requests.get`\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the GAMMA commands to in shell format\n    url_option: int\n        the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch`\n    \n    Returns\n    -------\n    \n    Examples\n    --------\n    \n    >>> from pyroSAR import identify\n    >>> from pyroSAR.gamma import correctOSV, convert2gamma\n    >>> filename = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'\n    # identify the SAR scene\n    >>> scene = identify(filename)\n    # unpack the zipped scene to an arbitrary directory\n    >>> scene.unpack('/home/test')\n    >>> print(scene.scene)\n    /home/test/S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.SAFE\n    # convert the unpacked scene to GAMMA format\n    >>> convert2gamma(id=scene, directory=scene.scene)\n    # correct the OSV information of the converted GAMMA images\n    >>> correctOSV(id=scene, osvdir='/home/test/osv')\n    \n    See Also\n    --------\n    :meth:`pyroSAR.drivers.SAFE.getOSV`\n    :class:`pyroSAR.S1.OSV`\n    \"\"\"\n    \n    if not isinstance(id, ID):\n        raise IOError('id must be of type pyroSAR.ID')\n    \n    if id.sensor not in ['S1A', 'S1B', 'S1C', 'S1D']:\n        raise IOError('this method is currently only available for Sentinel-1. 
Please stay tuned...')\n    \n    if not os.path.isdir(logpath):\n        os.makedirs(logpath)\n    \n    if osvdir is None:\n        try:\n            auxdatapath = ExamineSnap().auxdatapath\n        except AttributeError:\n            auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')\n        osvdir = os.path.join(auxdatapath, 'Orbits', 'Sentinel-1')\n    try:\n        id.getOSV(osvdir, osvType, timeout=timeout, url_option=url_option)\n    except URLError:\n        log.warning('..no internet access')\n    \n    parfiles = finder(directory, ['*.par'])\n    parfiles = [x for x in parfiles if ISPPar(x).filetype == 'isp']\n    # read parameter file entries into object\n    with ISPPar(parfiles[0]) as par:\n        # extract acquisition time stamp\n        timestamp = datetime.strptime(par.date, '%Y-%m-%dT%H:%M:%S.%f').strftime('%Y%m%dT%H%M%S')\n    \n    # find an OSV file matching the time stamp and defined OSV type(s)\n    with OSV(osvdir, timeout=timeout) as osv:\n        osvfile = osv.match(sensor=id.sensor, timestamp=timestamp, osvtype=osvType)\n    if not osvfile:\n        raise RuntimeError('no Orbit State Vector file found')\n    \n    if osvfile.endswith('.zip'):\n        osvdir = os.path.join(directory, 'osv')\n        with zf.ZipFile(osvfile) as zip:\n            zip.extractall(path=osvdir)\n        osvfile = os.path.join(osvdir, os.path.basename(osvfile).replace('.zip', ''))\n    \n    # update the GAMMA parameter file with the selected orbit state vectors\n    log.debug('correcting state vectors with file {}'.format(osvfile))\n    for par in parfiles:\n        log.debug(par)\n        with Lock(par.replace('.par', '')):\n            isp.S1_OPOD_vec(SLC_par=par,\n                            OPOD=osvfile,\n                            logpath=logpath,\n                            outdir=outdir,\n                            shellscript=shellscript)\n\n\ndef gc_map_wrap(image, namespace, dem, spacing, exist_ok=False,\n                
logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    helper function for computing DEM products in function geocode.\n\n    Parameters\n    ----------\n    image: str\n        the reference SAR image\n    namespace: pyroSAR.gamma.auxil.Namespace\n        an object collecting all output file names\n    dem: str\n        the digital elevation model\n    spacing: int or float\n        the target pixel spacing in meters\n    exist_ok: bool\n        allow existing output files and do not create new ones?\n    logpath: str\n        a directory to write command logfiles to\n    outdir: str\n        the directory to execute the command in\n    shellscript: str\n        a file to write the GAMMA commands to in shell format\n\n    Returns\n    -------\n\n    \"\"\"\n    # compute DEM oversampling factors; will be 1 for range and\n    # azimuth if the DEM spacing matches the target spacing\n    ovs_lat, ovs_lon = ovs(dem + '.par', spacing)\n    \n    image_par = ISPPar(image + '.par')\n    \n    gc_map_args = {'DEM_par': dem + '.par',\n                   'DEM': dem,\n                   'DEM_seg_par': namespace.dem_seg_geo + '.par',\n                   'DEM_seg': namespace.dem_seg_geo,\n                   'lookup_table': namespace.lut_init,\n                   'lat_ovr': ovs_lat,\n                   'lon_ovr': ovs_lon,\n                   'sim_sar': namespace.sim_sar_geo,\n                   'u': namespace.u_geo,\n                   'v': namespace.v_geo,\n                   'inc': namespace.inc_geo,\n                   'psi': namespace.psi_geo,\n                   'pix': namespace.pix_geo,\n                   'ls_map': namespace.ls_map_geo,\n                   'frame': 8,\n                   'ls_mode': 2,\n                   'logpath': logpath,\n                   'shellscript': shellscript,\n                   'outdir': outdir}\n    out_id = ['DEM_seg_par', 'DEM_seg', 'lookup_table', 'sim_sar',\n              'u', 'v', 'inc', 'psi', 'pix', 'ls_map']\n    \n    # 
remove all output files to make sure they are replaced and not updated\n    if not exist_ok:\n        for id in out_id:\n            base = gc_map_args[id]\n            if base != '-':\n                for suffix in ['', '.par', '.hdr']:\n                    fname = base + suffix\n                    if os.path.isfile(fname):\n                        os.remove(fname)\n    \n    if image_par.image_geometry == 'GROUND_RANGE':\n        gc_map_args.update({'GRD_par': image + '.par'})\n        if do_execute(gc_map_args, out_id, exist_ok):\n            diff.gc_map_grd(**gc_map_args)\n    else:\n        gc_map_args.update({'MLI_par': image + '.par'})\n        if do_execute(gc_map_args, out_id, exist_ok):\n            # gc_map2 is the successor of gc_map. However, earlier versions\n            # did not yet come with full functionality.\n            gc_map2_ok = False\n            if 'gc_map2' in dir(diff):\n                keys = list(gc_map_args.keys())\n                keys.remove('ls_mode')\n                gc_map2_ok = all([hasarg(diff.gc_map2, x) for x in keys])\n            if gc_map2_ok:\n                del gc_map_args['ls_mode']\n                diff.gc_map2(**gc_map_args)\n            else:\n                # gc_map might have an argument OFF_par, which is not needed for SLC/MLI geocoding\n                if hasarg(diff.gc_map, 'OFF_par'):\n                    gc_map_args.update({'OFF_par': '-'})\n                diff.gc_map(**gc_map_args)\n    \n    # create ENVI header files for all created images\n    for item in ['dem_seg_geo', 'sim_sar_geo', 'u_geo', 'v_geo',\n                 'psi_geo', 'pix_geo', 'inc_geo', 'ls_map_geo']:\n        if namespace.isappreciated(item):\n            mods = {'data_type': 1} if item == 'ls_map_geo' else None\n            par2hdr(namespace.dem_seg_geo + '.par', namespace.get(item) + '.hdr', mods)\n\n\ndef geocode(scene, dem, tmpdir, outdir, spacing, scaling='linear', func_geoback=1,\n            nodata=(0, -99), update_osv=True, 
osvdir=None, allow_RES_OSV=False,\n            cleanup=True, export_extra=None, basename_extensions=None,\n            removeS1BorderNoiseMethod='gamma', refine_lut=False, rlks=None, azlks=None,\n            s1_osv_url_option=1):\n    \"\"\"\n    general function for radiometric terrain correction (RTC) and geocoding of SAR backscatter images with GAMMA.\n    Applies the RTC method by :cite:t:`Small2011` to retrieve gamma nought RTC backscatter.\n    \n    Parameters\n    ----------\n    scene: str or ~pyroSAR.drivers.ID or list\n        the SAR scene(s) to be processed\n    dem: str\n        the reference DEM in GAMMA format\n    tmpdir: str\n        a temporary directory for writing intermediate files\n    outdir: str\n        the directory for the final GeoTIFF output files\n    spacing: float or int\n        the target pixel spacing in meters\n    scaling: str or list[str]\n        the value scaling of the backscatter values; either 'linear', 'db' or a list of both, i.e. ['linear', 'db']\n    func_geoback: {0, 1, 2, 3, 4, 5, 6, 7}\n        backward geocoding interpolation mode (see GAMMA command `geocode_back`)\n        \n         - 0: nearest-neighbor\n         - 1: bicubic spline (default)\n         - 2: bicubic-spline, interpolate log(data)\n         - 3: bicubic-spline, interpolate sqrt(data)\n         - 4: B-spline interpolation (default B-spline degree: 5)\n         - 5: B-spline interpolation sqrt(x) (default B-spline degree: 5)\n         - 6: Lanczos interpolation (default Lanczos function order: 5)\n         - 7: Lanczos interpolation sqrt(x) (default Lanczos function order: 5)\n        \n        .. note::\n        \n            log and sqrt interpolation modes should only be used with non-negative data!\n        \n        .. note::\n        \n            GAMMA recommendation for MLI data: \"The interpolation should be performed on\n            the square root of the data. 
A mid-order (3 to 5) B-spline interpolation is recommended.\"\n    nodata: tuple[float or int]\n        the nodata values for the output files; defined as a tuple with two values, the first for linear,\n        the second for logarithmic scaling\n    update_osv: bool\n        update the orbit state vectors?\n    osvdir: str or None\n        a directory for Orbit State Vector files;\n        this is currently only used for Sentinel-1 where two subdirectories POEORB and RESORB are created;\n        if set to None, a subdirectory OSV is created in the directory of the unpacked scene.\n    allow_RES_OSV: bool\n        also allow the less accurate RES orbit files to be used?\n        Otherwise the function will raise an error if no POE file exists.\n    cleanup: bool\n        should all files written to the temporary directory during function execution be deleted after processing?\n    export_extra: list[str] or None\n        a list of image file IDs to be exported to outdir\n        \n         - format is GeoTIFF if the file is geocoded and ENVI otherwise. Non-geocoded images can be converted via GAMMA\n           command data2tiff yet the output was found impossible to read with GIS software\n         - scaling of SAR image products is applied as defined by parameter `scaling`\n         - see Notes for ID options\n    basename_extensions: list[str] or None\n        names of additional parameters to append to the basename, e.g. 
['orbitNumber_rel']\n    removeS1BorderNoiseMethod: str or None\n        the S1 GRD border noise removal method to be applied; see :func:`pyroSAR.S1.removeGRDBorderNoise` for details; one of the following:\n        \n         - 'ESA': the pure implementation as described by ESA\n         - 'pyroSAR': the ESA method plus the custom pyroSAR refinement\n         - 'gamma': the GAMMA implementation of :cite:`Ali2018`\n         - None: do not remove border noise\n    refine_lut: bool\n        should the LUT for geocoding be refined using pixel area normalization?\n    rlks: int or None\n        the number of range looks. If not None, overrides the computation done by function\n        :func:`pyroSAR.ancillary.multilook_factors` based on the image pixel spacing and the target spacing.\n    azlks: int or None\n        the number of azimuth looks. Like `rlks`.\n    s1_osv_url_option: int\n        the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch`\n    \n    Returns\n    -------\n    \n    Note\n    ----\n    | intermediate output files\n    | DEM products are named <scene identifier>_<ID>, e.g. `S1A__IW___A_20141012T162337_inc_geo`\n    | SAR products will additionally contain the polarization, e.g. 
`S1A__IW___A_20141012T162337_VV_grd_mli`\n    | IDs in brackets are only written if selected by `export_extra`\n    \n    - images in range-Doppler geometry\n    \n      * **grd**: the ground range detected SAR intensity image\n      * **grd_mli**: the multi-looked grd image with approximated target resolution\n      * (**pix_ellip_sigma0**): ellipsoid-based pixel area\n      * (**pix_area_sigma0**): illuminated area as obtained from integrating DEM-facets in sigma projection (command pixel_area)\n      * (**pix_area_gamma0**): illuminated area as obtained from integrating DEM-facets in gamma projection (command pixel_area)\n      * **pix_ratio**: pixel area normalization factor (pix_ellip_sigma0 / pix_area_gamma0)\n      * **grd_mli_gamma0-rtc**: the terrain-corrected gamma0 backscatter (grd_mli * pix_ratio)\n      * (**gs_ratio**): gamma-sigma ratio (pix_gamma0 / pix_sigma0)\n    \n    - images in map geometry\n    \n      * **dem_seg_geo**: dem subsetted to the extent of the intersection between input DEM and SAR image\n      * (**u_geo**): zenith angle of surface normal vector n (angle between z and n)\n      * (**v_geo**): orientation angle of n (between x and projection of n in xy plane)\n      * **inc_geo**: local incidence angle (between surface normal and look vector)\n      * (**psi_geo**): projection angle (between surface normal and image plane normal)\n      * **ls_map_geo**: layover and shadow map\n      * (**sim_sar_geo**): simulated SAR backscatter image\n      * (**pix_ellip_sigma0_geo**): ellipsoid-based pixel area\n      * (**pix_area_sigma0_geo**): illuminated area as obtained from integrating DEM-facets in sigma projection (command pixel_area)\n      * (**pix_area_gamma0_geo**): illuminated area as obtained from integrating DEM-facets in gamma projection (command pixel_area)\n      * (**pix_ratio_geo**): pixel area normalization factor (pix_ellip_sigma0 / pix_area_gamma0)\n      * (**gs_ratio_geo**): gamma-sigma ratio (pix_gamma0 / 
pix_sigma0)\n    \n    - additional files\n    \n      * **lut_init**: initial geocoding lookup table\n    \n    - files specific to lookup table refinement\n    \n      * **lut_fine**: refined geocoding lookup table\n      * **diffpar**: ISP offset/interferogram parameter file\n      * **offs**: offset estimates (fcomplex)\n      * **coffs**: culled range and azimuth offset estimates (fcomplex)\n      * **coffsets**: culled offset estimates and cross correlation values (text format)\n      * **ccp**: cross-correlation of each patch (0.0->1.0) (float)\n    \n    Examples\n    --------\n    geocode a Sentinel-1 scene and export the local incidence angle map with it\n    \n    >>> from pyroSAR.gamma import geocode\n    >>> filename = 'S1A_IW_GRDH_1SDV_20180829T170656_20180829T170721_023464_028DE0_F7BD.zip'\n    >>> geocode(scene=filename, dem='demfile', outdir='outdir', spacing=20, scaling='db',\n    >>>         export_extra=['dem_seg_geo', 'inc_geo', 'ls_map_geo'])\n    \n    .. figure:: figures/gamma_geocode.svg\n        :align: center\n        \n        Workflow diagram for function geocode for processing a Sentinel-1 Ground Range\n        Detected (GRD) scene to radiometrically terrain corrected (RTC) gamma nought backscatter.\n    \n    \"\"\"\n    \n    # experimental option to reuse intermediate products; currently affects:\n    # - scene unpacking\n    # - conversion to GAMMA format\n    # - multilooking\n    # - DEM product generation\n    # - terrain flattening\n    exist_ok = False\n    \n    scenes = scene if isinstance(scene, list) else [scene]\n    if len(scenes) > 2:\n        raise RuntimeError(\"currently only one or two scenes can be passed via argument 'scene'\")\n    scenes = identify_many(scenes)\n    ref = scenes[0]\n    \n    if ref.sensor not in ['S1A', 'S1B', 'S1C', 'S1D', 'PALSAR-2']:\n        raise RuntimeError(\n            'this function currently only supports Sentinel-1 and PALSAR-2 Path data. 
Please stay tuned...')\n    \n    if export_extra is not None and not isinstance(export_extra, list):\n        raise TypeError(\"parameter 'export_extra' must either be None or a list\")\n    \n    tmpdir = os.path.join(tmpdir, ref.outname_base(extensions=basename_extensions))\n    \n    for dir in [tmpdir, outdir]:\n        os.makedirs(dir, exist_ok=True)\n    \n    if ref.is_processed(outdir):\n        log.info('scene {} already processed'.format(ref.outname_base(extensions=basename_extensions)))\n        return\n    \n    shellscript = os.path.join(tmpdir, ref.outname_base(extensions=basename_extensions) + '_commands.sh')\n    \n    scaling = [scaling] if isinstance(scaling, str) else scaling if isinstance(scaling, list) else []\n    scaling = union(scaling, ['db', 'linear'])\n    if len(scaling) == 0:\n        raise IOError('wrong input type for parameter scaling')\n    \n    for scene in scenes:\n        if scene.compression is not None:\n            log.info('unpacking scene')\n            try:\n                scene.unpack(tmpdir, exist_ok=exist_ok)\n            except RuntimeError:\n                log.info('scene was attempted to be processed before, exiting')\n                return\n    \n    path_log = os.path.join(tmpdir, 'logfiles')\n    if not os.path.isdir(path_log):\n        os.makedirs(path_log)\n    \n    for scene in scenes:\n        if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D'] and removeS1BorderNoiseMethod in ['ESA', 'pyroSAR']:\n            log.info('removing border noise')\n            scene.removeGRDBorderNoise(method=removeS1BorderNoiseMethod)\n    \n    log.info('converting scene to GAMMA format')\n    gamma_bnr = True if removeS1BorderNoiseMethod == 'gamma' else False\n    images = []\n    for scene in scenes:\n        files = convert2gamma(scene, directory=tmpdir, logpath=path_log, outdir=tmpdir,\n                              basename_extensions=basename_extensions, shellscript=shellscript,\n                              
S1_bnr=gamma_bnr, exist_ok=exist_ok, return_fnames=True)\n        images.extend(files)\n    \n    if update_osv:\n        for scene in scenes:\n            if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D']:\n                log.info('updating orbit state vectors')\n                if allow_RES_OSV:\n                    osvtype = ['POE', 'RES']\n                else:\n                    osvtype = 'POE'\n                try:\n                    correctOSV(id=scene, directory=tmpdir, osvdir=osvdir, osvType=osvtype,\n                               url_option=s1_osv_url_option,\n                               logpath=path_log, outdir=tmpdir, shellscript=shellscript)\n                except RuntimeError:\n                    msg = 'orbit state vector correction failed for scene {}'\n                    log.warning(msg.format(scene.scene))\n                    return\n    \n    log.info('calibrating')\n    images_cal = []\n    for scene in scenes:\n        files = calibrate(id=scene, directory=tmpdir, return_fnames=True,\n                          logpath=path_log, outdir=tmpdir, shellscript=shellscript)\n        if files is not None:\n            images_cal.extend(files)\n    if len(images_cal) > 0:\n        images = images_cal\n    \n    if len(scenes) > 1:\n        images_new = []\n        groups = groupby(images, 'polarization')\n        for group in groups:\n            out = group[0] + '_cat'\n            out_par = out + '.par'\n            all_exist = all([os.path.isfile(x) for x in [out, out_par]])\n            if not all_exist:\n                log.info('mosaicing scenes')\n                isp.MLI_cat(MLI1=group[0],\n                            MLI1_par=group[0] + '.par',\n                            MLI2=group[1],\n                            MLI2_par=group[1] + '.par',\n                            MLI3=out,\n                            MLI3_par=out_par,\n                            logpath=path_log, outdir=tmpdir, shellscript=shellscript)\n                
par2hdr(out_par, out + '.hdr')\n            images_new.append(out)\n        images = images_new\n    \n    if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D']:\n        log.info('multilooking')\n        groups = groupby(images, 'polarization')\n        images = []\n        for group in groups:\n            out = group[0].replace('IW1', 'IW_') + '_mli'\n            infile = group[0] if len(group) == 1 else group\n            multilook(infile=infile, outfile=out, spacing=spacing,\n                      rlks=rlks, azlks=azlks, exist_ok=exist_ok,\n                      logpath=path_log, outdir=tmpdir, shellscript=shellscript)\n            images.append(out)\n    products = list(images)\n    reference = images[0]\n    \n    # create output names for files to be written\n    # appreciated files will be written\n    n = Namespace(tmpdir, scene.outname_base(extensions=basename_extensions))\n    n.appreciate(['dem_seg_geo', 'lut_init', 'inc_geo', 'ls_map_geo'])\n    \n    pix_geo = []\n    if export_extra is not None:\n        n.appreciate(export_extra)\n        pix = ['pix_area_sigma0', 'pix_area_gamma0', 'pix_ratio', 'gs_ratio', 'pix_ellip_sigma0']\n        for item in pix:\n            if item + '_geo' in export_extra:\n                pix_geo.append(item + '_geo')\n                n.appreciate([item])\n    \n    if refine_lut:\n        n.appreciate(['pix_area_sigma0'])\n    \n    reference_par = ISPPar(reference + '.par')\n    ######################################################################\n    # geocoding and DEM product generation ###############################\n    ######################################################################\n    log.info('geocoding and creating DEM products')\n    gc_map_wrap(image=reference, namespace=n, dem=dem, spacing=spacing, exist_ok=exist_ok,\n                logpath=path_log, outdir=tmpdir, shellscript=shellscript)\n    \n    sim_width = ISPPar(n.dem_seg_geo + '.par').width\n    
######################################################################\n    # RTC reference area computation #####################################\n    ######################################################################\n    log.info('computing pixel area (for radiometric terrain correction, rtc)')\n    pixel_area_wrap(image=reference, namespace=n, lut=n.lut_init, exist_ok=exist_ok,\n                    logpath=path_log, outdir=tmpdir, shellscript=shellscript)\n    \n    ######################################################################\n    # lookup table refinement ############################################\n    ######################################################################\n    lut_final = n.lut_init\n    if refine_lut:\n        log.info('refining lookup table')\n        # Refinement of geocoding lookup table\n        diff.create_diff_par(PAR_1=reference + '.par',\n                             PAR_2='-',\n                             DIFF_par=reference + '_diff.par',\n                             PAR_type=1,\n                             iflg=0,\n                             logpath=path_log,\n                             outdir=tmpdir,\n                             shellscript=shellscript)\n        # Refinement of lookup table\n        # for \"shift\" data offset window size enlarged twice to 512 and 256, for data without shift 256 128\n        diff.offset_pwrm(MLI_1=n.pix_area_sigma0,\n                         MLI_2=reference,\n                         DIFF_par=reference + '_diff.par',\n                         offs=reference + '_offs',\n                         ccp=reference + '_ccp',\n                         rwin=512,\n                         azwin=256,\n                         offsets=reference + '_offsets.txt',\n                         n_ovr=2,\n                         nr=64,\n                         naz=32,\n                         thres=0.2,\n                         logpath=path_log,\n                         outdir=tmpdir,\n     
                    shellscript=shellscript)\n        # par2hdr(master + '.par', master + '_offs' + '.hdr')\n        diff.offset_fitm(offs=reference + '_offs',\n                         ccp=reference + '_ccp',\n                         DIFF_par=reference + '_diff.par',\n                         coffs=reference + '_coffs',\n                         coffsets=reference + '_coffsets',\n                         thres=0.2,\n                         npoly=4,\n                         logpath=path_log,\n                         outdir=tmpdir,\n                         shellscript=shellscript)\n        # Updating of the look-up table\n        diff.gc_map_fine(gc_in=lut_final,\n                         width=sim_width,\n                         DIFF_par=reference + '_diff.par',\n                         gc_out=lut_final + '.fine',\n                         ref_flg=1,\n                         logpath=path_log,\n                         outdir=tmpdir,\n                         shellscript=shellscript)\n        # Reproduce pixel area estimate\n        pixel_area_wrap(image=reference, namespace=n, lut=lut_final + '.fine',\n                        logpath=path_log, outdir=tmpdir, shellscript=shellscript)\n        lut_final = lut_final + '.fine'\n    ######################################################################\n    # radiometric terrain correction and back-geocoding ##################\n    ######################################################################\n    log.info('applying rtc and back-geocoding')\n    for image in images:\n        if 'lat' in locals():\n            lat.product(data_1=image,\n                        data_2=n.pix_ratio,\n                        product=image + '_gamma0-rtc',\n                        width=reference_par.range_samples,\n                        bx=1,\n                        by=1,\n                        logpath=path_log,\n                        outdir=tmpdir,\n                        shellscript=shellscript)\n        else:\n    
        lat_product(data_in1=image,\n                        data_in2=n.pix_ratio,\n                        data_out=image + '_gamma0-rtc')\n        par2hdr(reference + '.par', image + '_gamma0-rtc.hdr')\n        diff.geocode_back(data_in=image + '_gamma0-rtc',\n                          width_in=reference_par.range_samples,\n                          lookup_table=lut_final,\n                          data_out=image + '_gamma0-rtc_geo',\n                          width_out=sim_width,\n                          interp_mode=func_geoback,\n                          logpath=path_log,\n                          outdir=tmpdir,\n                          shellscript=shellscript)\n        par2hdr(n.dem_seg_geo + '.par', image + '_gamma0-rtc_geo.hdr')\n        products.extend([image + '_gamma0-rtc', image + '_gamma0-rtc_geo'])\n    ######################################################################\n    # log scaling and image export #######################################\n    ######################################################################\n    log.info('conversion to (dB and) GeoTIFF')\n    \n    def exporter(data_in, outdir, nodata, scale='linear', dtype=2):\n        if scale == 'db':\n            if re.search('_geo', os.path.basename(data_in)):\n                width = sim_width\n                refpar = n.dem_seg_geo + '.par'\n            else:\n                width = reference_par.range_samples\n                refpar = reference + '.par'\n            if 'lat' in locals():\n                lat.linear_to_dB(data_in=data_in,\n                                 data_out=data_in + '_db',\n                                 width=width,\n                                 inverse_flag=0,\n                                 null_value=nodata,\n                                 logpath=path_log,\n                                 outdir=tmpdir,\n                                 shellscript=shellscript)\n            else:\n                lat_linear_to_db(data_in=data_in,\n  
                               data_out=data_in + '_db')\n            par2hdr(refpar, data_in + '_db.hdr')\n            data_in += '_db'\n        if re.search('_geo', os.path.basename(data_in)):\n            outfile = os.path.join(outdir, os.path.basename(data_in) + '.tif')\n            disp.data2geotiff(DEM_par=n.dem_seg_geo + '.par',\n                              data=data_in,\n                              type=dtype,\n                              GeoTIFF=outfile,\n                              no_data=nodata,\n                              logpath=path_log,\n                              outdir=tmpdir,\n                              shellscript=shellscript)\n        \n        else:\n            outfile = os.path.join(outdir, os.path.basename(data_in))\n            shutil.copyfile(data_in, outfile)\n            shutil.copyfile(data_in + '.hdr', outfile + '.hdr')\n    \n    for image in images:\n        for scale in scaling:\n            exporter(data_in=image + '_gamma0-rtc_geo', scale=scale, dtype=2,\n                     nodata=dict(zip(('linear', 'db'), nodata))[scale], outdir=outdir)\n    \n    if scene.sensor in ['S1A', 'S1B', 'S1C', 'S1D']:\n        outname_base = scene.outname_base(extensions=basename_extensions)\n        shutil.copyfile(os.path.join(scene.scene, 'manifest.safe'),\n                        os.path.join(outdir, outname_base + '_manifest.safe'))\n    \n    if export_extra is not None:\n        log.info('back-geocoding and exporting extra products')\n        for key in export_extra:\n            if key in pix_geo:\n                fname = n.get(key)\n                diff.geocode_back(data_in=fname.replace('_geo', ''),\n                                  width_in=reference_par.range_samples,\n                                  lookup_table=lut_final,\n                                  data_out=fname,\n                                  width_out=sim_width,\n                                  interp_mode=func_geoback,\n                            
      logpath=path_log,\n                                  outdir=tmpdir,\n                                  shellscript=shellscript)\n                par2hdr(n.dem_seg_geo + '.par', fname + '.hdr')\n            # SAR image products\n            product_match = [x for x in products if x.endswith(key)]\n            if len(product_match) > 0:\n                for product in product_match:\n                    for scale in scaling:\n                        exporter(data_in=product, outdir=outdir, scale=scale, dtype=2,\n                                 nodata=dict(zip(('linear', 'db'), nodata))[scale])\n            # ancillary (DEM) products\n            elif n.isfile(key) and key not in ['lut_init']:\n                filename = n[key]\n                dtype = 5 if key == 'ls_map_geo' else 2\n                nodata = 0\n                exporter(filename, outdir, dtype=dtype, nodata=nodata)\n            else:\n                log.warning('cannot export file {}'.format(key))\n    \n    shutil.copyfile(shellscript, os.path.join(outdir, os.path.basename(shellscript)))\n    \n    if cleanup:\n        log.info('cleaning up temporary files')\n        shutil.rmtree(tmpdir)\n\n\ndef _delete_product(path):\n    for item in [path, path + '.hdr', path + '.aux.xml']:\n        if os.path.isfile(item):\n            os.remove(item)\n\n\ndef lat_linear_to_db(data_in: str, data_out: str) -> None:\n    \"\"\"\n    Alternative to LAT module command linear_to_dB.\n\n    Parameters\n    ----------\n    data_in\n        the input data file\n    data_out\n        the output data file\n    \"\"\"\n    tmp = data_out + '_tmp'\n    try:\n        with Raster(data_in) as ras:\n            a1 = ras.array()\n            a1[a1 <= 0] = np.nan\n            out = 10 * np.log10(a1)\n            out[~np.isfinite(out)] = 0\n            ras.write(outname=tmp, array=out, format='ENVI',\n                      nodata=0, dtype='float32')\n        disp.swap_bytes(infile=tmp, outfile=data_out, swap_type=4)\n      
  shutil.copy(src=data_in + '.hdr', dst=data_out + '.hdr')\n    except Exception:\n        _delete_product(data_out)\n        raise\n    finally:\n        _delete_product(tmp)\n\n\ndef lat_product(data_in1: str, data_in2: str, data_out: str) -> None:\n    \"\"\"\n    Alternative to LAT module command product.\n\n    Parameters\n    ----------\n    data_in1\n        input data file 1\n    data_in2\n        input data file 2\n    data_out\n        the output data file\n    \"\"\"\n    tmp = data_out + '_tmp'\n    try:\n        with Raster(data_in1) as ras:\n            a1 = ras.array()\n            a1[a1 == 0] = np.nan\n        with Raster(data_in2) as ras:\n            a2 = ras.array()\n            a2[a2 == 0] = np.nan\n            out = a1 * a2\n            out[~np.isfinite(out)] = 0\n            ras.write(outname=tmp, array=out, format='ENVI',\n                      nodata=0, dtype='float32')\n        disp.swap_bytes(infile=tmp, outfile=data_out, swap_type=4)\n        shutil.copy(src=data_in2 + '.hdr', dst=data_out + '.hdr')\n    except Exception:\n        _delete_product(data_out)\n        raise\n    finally:\n        _delete_product(tmp)\n\n\ndef lat_ratio(data_in1: str, data_in2: str, data_out: str) -> None:\n    \"\"\"\n    Alternative to LAT module command ratio.\n\n    Parameters\n    ----------\n    data_in1\n        input data file 1\n    data_in2\n        input data file 2\n    data_out\n        the output data file\n    \"\"\"\n    tmp = data_out + '_tmp'\n    try:\n        with Raster(data_in1) as ras:\n            a1 = ras.array()\n            a1[a1 == 0] = np.nan\n        with Raster(data_in2) as ras:\n            a2 = ras.array()\n            a2[a2 == 0] = np.nan\n            out = a1 / a2\n            out[~np.isfinite(out)] = 0\n            ras.write(outname=tmp, array=out, format='ENVI',\n                      nodata=0, dtype='float32')\n        disp.swap_bytes(infile=tmp, outfile=data_out, swap_type=4)\n        shutil.copy(src=data_in1 + '.hdr', 
dst=data_out + '.hdr')\n    except Exception:\n        _delete_product(data_out)\n        raise\n    finally:\n        _delete_product(tmp)\n\n\ndef multilook(infile, outfile, spacing, rlks=None, azlks=None,\n              exist_ok=False, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    Multilooking of SLC and MLI images.\n\n    If the image is in slant range the ground range resolution is computed by dividing the range pixel spacing by\n    the sine of the incidence angle.\n\n    The looks in range and azimuth are chosen to approximate the target resolution by rounding the ratio between\n    target resolution and ground range/azimuth pixel spacing to the nearest integer.\n\n    An ENVI HDR parameter file is automatically written for better handling in other software.\n\n    Parameters\n    ----------\n    infile: str or list[str]\n        one of the following:\n\n        - a SAR image in GAMMA format with a parameter file <infile>.par\n        - a list of ScanSAR SLC swaths with parameter files <slc>.par and <slc>.tops_par; in this case a text file\n          <outfile>_slc-tab.txt will be created, which is passed to the GAMMA command ``multi_look_ScanSAR``\n    outfile: str\n        the name of the output GAMMA MLI file\n    spacing: int\n        the target pixel spacing in ground range\n    rlks: int or None\n        the number of range looks. If not None, overrides the computation done by function\n        :func:`pyroSAR.ancillary.multilook_factors` based on the image pixel spacing and the target spacing.\n    azlks: int or None\n        the number of azimuth looks. 
Like `rlks`.\n    exist_ok: bool\n        allow existing output files and do not create new ones?\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the GAMMA commands to in shell format\n\n    See Also\n    --------\n    pyroSAR.ancillary.multilook_factors\n    \"\"\"\n    # read the input parameter file\n    if isinstance(infile, str):\n        par = ISPPar(infile + '.par')\n        range_pixel_spacing = par.range_pixel_spacing\n        azimuth_pixel_spacing = par.azimuth_pixel_spacing\n        incidence_angle = par.incidence_angle\n        image_geometry = par.image_geometry\n        image_format = par.image_format\n    elif isinstance(infile, list):\n        par = [ISPPar(x + '.par') for x in infile]\n        range_pixel_spacings = [getattr(x, 'range_pixel_spacing') for x in par]\n        range_pixel_spacing = sum(range_pixel_spacings) / len(par)\n        azimuth_pixel_spacings = [getattr(x, 'azimuth_pixel_spacing') for x in par]\n        azimuth_pixel_spacing = sum(azimuth_pixel_spacings) / len(par)\n        incidence_angles = [getattr(x, 'incidence_angle') for x in par]\n        incidence_angle = sum(incidence_angles) / len(par)\n        image_geometry = par[0].image_geometry\n        image_format = par[0].image_format\n    else:\n        raise TypeError(\"'infile' must be str or list\")\n    \n    if rlks is None and azlks is None:\n        rlks, azlks = multilook_factors(source_rg=range_pixel_spacing,\n                                        source_az=azimuth_pixel_spacing,\n                                        target=spacing,\n                                        geometry=image_geometry,\n                                        incidence=incidence_angle)\n    if [rlks, azlks].count(None) > 0:\n        raise RuntimeError(\"'rlks' and 'azlks' must either both be integers or None\")\n    \n    pars = 
{'rlks': rlks,\n            'azlks': azlks,\n            'logpath': logpath,\n            'shellscript': shellscript,\n            'outdir': outdir}\n    \n    if image_format in ['SCOMPLEX', 'FCOMPLEX']:\n        # multilooking of SLC images\n        pars['MLI'] = outfile\n        pars['MLI_par'] = outfile + '.par'\n        if isinstance(infile, str):\n            pars['SLC'] = infile\n            pars['SLC_par'] = infile + '.par'\n            if do_execute(pars, ['MLI', 'MLI_par'], exist_ok):\n                isp.multi_look(**pars)\n                par2hdr(outfile + '.par', outfile + '.hdr')\n        else:\n            slcpar = [x + '.par' for x in infile]\n            topspar = [x + '.tops_par' for x in infile]\n            slc_tab = outfile + '_slc-tab.txt'\n            if not os.path.isfile(slc_tab) or not exist_ok:\n                with open(slc_tab, 'w') as tab:\n                    for item in zip(infile, slcpar, topspar):\n                        tab.write(' '.join(item) + '\\n')\n            pars['SLC_tab'] = slc_tab\n            if do_execute(pars, ['MLI', 'MLI_par'], exist_ok):\n                if 'multi_look_ScanSAR' in dir(isp):\n                    isp.multi_look_ScanSAR(**pars)\n                else:\n                    isp.multi_S1_TOPS(**pars)\n                par2hdr(outfile + '.par', outfile + '.hdr')\n    else:\n        # multilooking of MLI images\n        pars['MLI_in'] = infile\n        pars['MLI_in_par'] = infile + '.par'\n        pars['MLI_out'] = outfile\n        pars['MLI_out_par'] = outfile + '.par'\n        if do_execute(pars, ['MLI_out', 'MLI_out_par'], exist_ok):\n            isp.multi_look_MLI(**pars)\n            par2hdr(outfile + '.par', outfile + '.hdr')\n\n\ndef ovs(parfile, spacing):\n    \"\"\"\n    compute DEM oversampling factors for a target resolution in meters\n\n    Parameters\n    ----------\n    parfile: str\n        a GAMMA DEM parameter file\n    spacing: int or float\n        the target pixel spacing in meters\n    
\n    Returns\n    -------\n    tuple of float\n        the oversampling factors for latitude and longitude\n    \"\"\"\n    # read DEM parameter file\n    dempar = ISPPar(parfile)\n    \n    # extract coordinates and pixel posting of the DEM\n    if hasattr(dempar, 'post_north'):\n        post_north, post_east = [abs(float(x)) for x in\n                                 [dempar.post_north, dempar.post_east]]\n    else:\n        res_lat, res_lon = [abs(float(x)) for x in [dempar.post_lat, dempar.post_lon]]\n        \n        # compute center coordinate\n        lat = float(dempar.corner_lat) - (res_lat * (dempar.nlines // 2))\n        lon = float(dempar.corner_lon) + (res_lon * (dempar.width // 2))\n        \n        # convert DEM resolution to meters\n        post_north = haversine(lat, lon, lat + res_lat, lon)\n        post_east = haversine(lat, lon, lat, lon + res_lon)\n    \n    # compute resampling factors for the DEM\n    ovs_lat = post_north / spacing\n    ovs_lon = post_east / spacing\n    return ovs_lat, ovs_lon\n\n\ndef pixel_area_wrap(image, namespace, lut, exist_ok=False,\n                    logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    helper function for computing pixel_area files in function geocode.\n\n    Parameters\n    ----------\n    image: str\n        the reference SAR image\n    namespace: pyroSAR.gamma.auxil.Namespace\n        an object collecting all output file names\n    lut: str\n        the name of the lookup table\n    exist_ok: bool\n        allow existing output files and do not create new ones?\n    logpath: str\n        a directory to write command logfiles to\n    outdir: str\n        the directory to execute the command in\n    shellscript: str\n        a file to write the GAMMA commands to in shell format\n\n    Returns\n    -------\n\n    \"\"\"\n    image_par = ISPPar(image + '.par')\n    \n    if namespace.isappreciated('gs_ratio'):\n        namespace.appreciate(['pix_area_sigma0', 'pix_area_gamma0'])\n    \n 
   pixel_area_args = {'MLI_par': image + '.par',\n                       'DEM_par': namespace.dem_seg_geo + '.par',\n                       'DEM': namespace.dem_seg_geo,\n                       'lookup_table': lut,\n                       'ls_map': namespace.ls_map_geo,\n                       'inc_map': namespace.inc_geo,\n                       'pix_sigma0': namespace.pix_area_sigma0,\n                       'pix_gamma0': namespace.pix_area_gamma0,\n                       'logpath': logpath,\n                       'outdir': outdir,\n                       'shellscript': shellscript}\n    \n    radcal_mli_args = {'MLI': image,\n                       'MLI_par': image + '.par',\n                       'OFF_par': '-',\n                       'CMLI': image + '_cal',\n                       'refarea_flag': 1,  # calculate sigma0, scale area by sin(inc_ang)/sin(ref_inc_ang)\n                       'pix_area': namespace.pix_ellip_sigma0,\n                       'logpath': logpath,\n                       'outdir': outdir,\n                       'shellscript': shellscript}\n    \n    # newer versions of GAMMA enable creating the ratio of ellipsoid-based\n    # pixel area and DEM-facet pixel area directly with command pixel_area\n    if hasarg(diff.pixel_area, 'sig2gam_ratio'):\n        namespace.appreciate(['pix_ratio'])\n        pixel_area_args['sig2gam_ratio'] = namespace.pix_ratio\n        if do_execute(pixel_area_args, ['pix_sigma0', 'pix_gamma0', 'sig2gam_ratio'], exist_ok):\n            diff.pixel_area(**pixel_area_args)\n        \n        if namespace.isappreciated('pix_ellip_sigma0'):\n            if do_execute(radcal_mli_args, ['pix_area'], exist_ok):\n                isp.radcal_MLI(**radcal_mli_args)\n                par2hdr(image + '.par', image + '_cal.hdr')\n    else:\n        # sigma0 = MLI * ellip_pix_sigma0 / pix_area_sigma0\n        # gamma0 = MLI * ellip_pix_sigma0 / pix_area_gamma0\n        namespace.appreciate(['pix_area_gamma0', 'pix_ellip_sigma0', 
'pix_ratio'])\n        pixel_area_args['pix_gamma0'] = namespace.pix_area_gamma0\n        radcal_mli_args['pix_area'] = namespace.pix_ellip_sigma0\n        \n        # actual illuminated area as obtained from integrating DEM-facets (pix_area_sigma0 | pix_area_gamma0)\n        if do_execute(pixel_area_args, ['pix_sigma0', 'pix_gamma0'], exist_ok):\n            diff.pixel_area(**pixel_area_args)\n        \n        # ellipsoid-based pixel area (ellip_pix_sigma0)\n        if do_execute(radcal_mli_args, ['pix_area'], exist_ok):\n            isp.radcal_MLI(**radcal_mli_args)\n            par2hdr(image + '.par', image + '_cal.hdr')\n        \n        if os.path.isfile(image + '.hdr'):\n            for item in ['pix_area_sigma0', 'pix_area_gamma0', 'pix_ellip_sigma0']:\n                if namespace.isappreciated(item):\n                    hdr_out = namespace[item] + '.hdr'\n                    c1 = not os.path.isfile(hdr_out)\n                    c2 = os.path.isfile(hdr_out) and not exist_ok\n                    if c1 or c2:\n                        shutil.copy(src=image + '.hdr', dst=hdr_out)\n        \n        # ratio of ellipsoid-based pixel area and DEM-facet pixel area\n        c1 = not os.path.isfile(namespace.pix_ratio)\n        c2 = os.path.isfile(namespace.pix_ratio) and not exist_ok\n        if c1 or c2:\n            if 'lat' in locals():\n                lat.ratio(d1=namespace.pix_ellip_sigma0,\n                          d2=namespace.pix_area_gamma0,\n                          ratio=namespace.pix_ratio,\n                          width=image_par.range_samples,\n                          bx=1,\n                          by=1,\n                          logpath=logpath,\n                          outdir=outdir,\n                          shellscript=shellscript)\n            else:\n                for item in ['pix_area_gamma0', 'pix_ellip_sigma0']:\n                    par2hdr(image + '.par', namespace[item] + '.hdr')\n                
lat_ratio(data_in1=namespace.pix_ellip_sigma0,\n                          data_in2=namespace.pix_area_gamma0,\n                          data_out=namespace.pix_ratio)\n    \n    if namespace.isappreciated('gs_ratio'):\n        c1 = not os.path.isfile(namespace.gs_ratio)\n        c2 = os.path.isfile(namespace.gs_ratio) and not exist_ok\n        if c1 or c2:\n            if 'lat' in locals():\n                lat.ratio(d1=namespace.pix_area_gamma0,\n                          d2=namespace.pix_area_sigma0,\n                          ratio=namespace.gs_ratio,\n                          width=image_par.range_samples,\n                          bx=1,\n                          by=1,\n                          logpath=logpath,\n                          outdir=outdir,\n                          shellscript=shellscript)\n            else:\n                for item in ['pix_area_gamma0', 'pix_area_sigma0']:\n                    par2hdr(image + '.par', namespace[item] + '.hdr')\n                lat_ratio(data_in1=namespace.pix_area_gamma0,\n                          data_in2=namespace.pix_area_sigma0,\n                          data_out=namespace.gs_ratio)\n    \n    for item in ['pix_area_sigma0', 'pix_area_gamma0',\n                 'pix_ratio', 'pix_ellip_sigma0', 'gs_ratio']:\n        if namespace.isappreciated(item):\n            hdr_out = namespace[item] + '.hdr'\n            c1 = not os.path.isfile(item)\n            c2 = os.path.isfile(hdr_out) and not exist_ok\n            if c1 or c2:\n                par2hdr(image + '.par', hdr_out)\n\n\ndef S1_deburst(burst1, burst2, burst3, name_out, rlks=5, azlks=1,\n               replace=False, logpath=None, outdir=None, shellscript=None):\n    \"\"\"\n    Debursting of Sentinel-1 SLC imagery in GAMMA\n    \n    The procedure consists of two steps. 
First antenna pattern deramping and\n    then mosaicing of the single deramped bursts.\n    For mosaicing, the burst boundaries are calculated from the number of looks in range (`rlks`)\n    and azimuth (`azlks`), in this case 5 range looks and 1 azimuth looks.\n    Alternately 10 range looks and 2 azimuth looks could be used.\n    \n    Parameters\n    ----------\n    burst1: str\n        burst image 1\n    burst2: str\n        burst image 2\n    burst3: str\n        burst image 3\n    name_out: str\n        the name of the output file\n    rlks: int\n        the number of looks in range\n    azlks: int\n        the number of looks in azimuth\n    replace: bool\n        replace the burst images by the new file? If True, the three burst images will be deleted.\n    logpath: str or None\n        a directory to write command logfiles to\n    outdir: str or None\n        the directory to execute the command in\n    shellscript: str or None\n        a file to write the Gamma commands to in shell format\n\n    Returns\n    -------\n    \n    \"\"\"\n    for burst in [burst1, burst2, burst3]:\n        if not os.path.isfile(burst) or not os.path.isfile(burst + '.par') or not os.path.isfile(burst + '.tops_par'):\n            raise IOError('input files missing; parameter files must be named e.g. 
{burst1}.par and {burst1}.tops_par')\n    outpath = os.path.dirname(name_out)\n    if not os.path.isdir(outpath):\n        os.makedirs(outpath)\n    tab_in = os.path.join(outpath, 'tab_deramp1')\n    tab_out = os.path.join(outpath, 'tab_deramp2')\n    with open(tab_in, 'w') as out1:\n        with open(tab_out, 'w') as out2:\n            for item in [burst1, burst2, burst3]:\n                out1.write(item + '\\t' + item + '.par\\t' + item + '.tops_par\\n')\n                out2.write(item + '_drp\\t' + item + '_drp.par\\t' + item + '_drp.tops_par\\n')\n    \n    isp.SLC_deramp_ScanSAR(SLC1_tab=tab_in,\n                           SLC2_tab=tab_out,\n                           mode=0,\n                           phflg=0,\n                           logpath=logpath,\n                           outdir=outdir,\n                           shellscript=shellscript)\n    \n    new = 'SLC_mosaic_ScanSAR'\n    old = 'SLC_mosaic_S1_TOPS'\n    slc_mosaic = new if hasattr(isp, new) else old\n    getattr(isp, slc_mosaic)(SLC_tab=tab_out,\n                             SLC=name_out,\n                             SLC_par=name_out + '.par',\n                             rlks=rlks,\n                             azlks=azlks,\n                             logpath=logpath,\n                             outdir=outdir,\n                             shellscript=shellscript)\n    if replace:\n        for item in [burst1, burst2, burst3]:\n            for subitem in [item + x for x in ['', '.par', '.tops_par']]:\n                os.remove(subitem)\n    for item in [burst1, burst2, burst3]:\n        for subitem in [item + x for x in ['_drp', '_drp.par', '_drp.tops_par']]:\n            os.remove(subitem)\n    os.remove(tab_in)\n    os.remove(tab_out)\n"
  },
  {
    "path": "pyroSAR/install/download_egm96_15.gtx.sh",
    "content": "#!/usr/bin/env bash\n# download EGM96 geoid model to convert heights with GDAL\ncd /usr/share/proj\nsudo wget https://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx\nsudo chmod 644 egm96_15.gtx\n"
  },
  {
    "path": "pyroSAR/install/download_testdata.sh",
    "content": "#!/usr/bin/env bash\n\n\nmkdir -p $TESTDATA_DIR\n\n#cd $TESTDATA_DIR\n\necho \"Start Download forest_brazil\"\nwget --quiet -P $TESTDATA_DIR 'ftp://ftp.eorc.jaxa.jp/pub/ALOS-2/1501sample/310_forestbrazil/0000022708_001001_ALOS2015976960-140909.zip'\necho \"End download forest_brazil\"\n"
  },
  {
    "path": "pyroSAR/install/install_deps.sh",
    "content": "#!/usr/bin bash\n##############################################################\n# manual installation of pyroSAR dependencies\n# GDAL, GEOS, PROJ, SpatiaLite\n# John Truckenbrodt, Rhys Kidd 2017-2019\n##############################################################\n\n\n# define a root directory for downloading packages\nroot=$HOME/test\n\n# define a directory for download and unpacked packages\ndownloaddir=${root}/originals\npackagedir=${root}/packages\n\n# define the installation directory; This needs to be outside of the root directory so that the latter can be deleted in the end.\n# In case installdir is set to a location outside of /usr/*, the following installation commands do not need to be run with\n# administration rights (sudo)\n#installdir=/usr/local\ninstalldir=$HOME/local\n\n# the version of GDAL and its dependencies\nGDALVERSION=3.0.1\n\n# these versions are not quite as important. If you use already installed them you might need to define their location\n# for the configuration of GDAL\ngeos_version=3.7.2\nproj_version=6.1.1\nspatialite_version=4.3.0\n\n# define the number of threads for compilation\nthreads=2\n########################################################################################################################\n# setup environment variables and create directories\n\nif [[ -d \"${root}\" ]]; then\n    if [[  \"$(ls -A ${root})\" ]]; then\n        echo \"Error! root already exists. 
Please choose a fresh directory which can be deleted once finished\" 1>&2\n        #exit 64\n    fi\nfi\n\nexport PATH=${installdir}/bin:$PATH\nexport LD_LIBRARY_PATH=${installdir}/lib:$LD_LIBRARY_PATH\n\n\nfor dir in ${root} ${downloaddir} ${packagedir} ${installdir}; do\n    mkdir -p ${dir}\ndone\n########################################################################################################################\n# download GDAL and its dependencies\n\ndeclare -a remotes=(\n                \"https://download.osgeo.org/gdal/$GDALVERSION/gdal-$GDALVERSION.tar.gz\"\n                \"https://download.osgeo.org/geos/geos-$geos_version.tar.bz2\"\n                \"https://download.osgeo.org/proj/proj-$proj_version.tar.gz\"\n                \"https://www.gaia-gis.it/gaia-sins/libspatialite-sources/libspatialite-$spatialite_version.tar.gz\"\n                )\n\nfor package in \"${remotes[@]}\"; do\n    wget ${package} -nc -P ${downloaddir}\ndone\n########################################################################################################################\n# unpack downloaded archives\n\nfor package in ${downloaddir}/*tar.gz; do\n    tar xfvz ${package} -C ${packagedir}\ndone\nfor package in ${downloaddir}/*tar.bz2; do\n    tar xfvj ${package} -C ${packagedir}\ndone\n########################################################################################################################\n# install GEOS\n\ncd ${packagedir}/geos*\n./configure --prefix ${installdir}\nmake -j${threads}\nsudo make install\n########################################################################################################################\n# install PROJ\n\ncd ${packagedir}/proj*\n./configure --prefix ${installdir}\nmake -j${threads}\nsudo make install\n########################################################################################################################\n# install spatialite\n\ncd ${packagedir}/libspatialite*\n\n# PROJ now uses a new API, using the old 
deprecated one (as done by spatialite) needs to be indicated explicitly\n./configure --prefix=${installdir} \\\n            CFLAGS=-DACCEPT_USE_OF_DEPRECATED_PROJ_API_H\n\nmake -j${threads}\nsudo make install\n########################################################################################################################\n# install GDAL\n\n# please check the output of configure to make sure that the GEOS and PROJ drivers are enabled\n# otherwise you might need to define the locations of the packages\n\npython_bin=/usr/bin/python3.6\n\ncd ${packagedir}/gdal*\n./configure --prefix ${installdir} \\\n            --with-python=${python_bin} \\\n            --with-geos=${installdir}/bin/geos-config \\\n            --with-proj=${installdir} \\\n            --with-spatialite=${installdir}\n\nmake -j${threads}\nsudo make install\n########################################################################################################################\n# install GDAL Python binding inside a virtual environment\n\npython -m pip install gdal==$GDALVERSION --global-option=build_ext --user --global-option=\"-I$installdir/include\"\n########################################################################################################################\n########################################################################################################################\n# install pysqlite2 python package with static sqlite3 build\n# this needs git to be installed\n\ncd ${packagedir}\ngit clone https://github.com/ghaering/pysqlite.git\ncd pysqlite\n\nwget https://sqlite.org/2019/sqlite-amalgamation-3290000.zip\n\nunzip sqlite-amalgamation-3290000.zip\ncp sqlite-amalgamation-3290000/* .\n\nsudo python setup.py build_static install 
--prefix=${installdir}\n########################################################################################################################\n########################################################################################################################\n# finishing the process\n\necho depending on your choice of installdir and Python version you might need to add the following lines to your .bashrc:\necho \"export PATH=${installdir}/bin:$\"PATH\necho \"export LD_LIBRARY_PATH=${installdir}/lib:$\"LD_LIBRARY_PATH\necho \"export PYTHONPATH=${installdir}/lib64/python3.6/site-packages:$\"PYTHONPATH\necho \"done\"\n\n# deleting the root directory which is no longer needed\nsudo rm -rf ${root}\n"
  },
  {
    "path": "pyroSAR/patterns.py",
    "content": "###############################################################################\n# Reading and Organizing system for SAR images\n# Copyright (c) 2016-2023, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\"\"\"\nThis file contains regular expressions to identify SAR products.\nThe pattern 'pyrosar' identifies products in pyroSAR's unified naming scheme.\nThe names of all other expressions correspond to the classes found in pyroSAR.drivers.\n\"\"\"\npyrosar = r'(?:.*[/\\\\]|)' \\\n          r'(?P<outname_base>' \\\n          r'(?P<sensor>[A-Z0-9]{1,4})_+' \\\n          r'(?P<acquisition_mode>[A-Z0-9]{1,4})_+' \\\n          r'(?P<orbit>[AD])_' \\\n          r'(?P<start>[0-9T]{15})' \\\n          r'(?:_(?P<extensions>\\w*?)|)' \\\n          r')_*' \\\n          r'(?:(?P<polarization>[HV]{2})_' \\\n          r'(?P<proc_steps>[\\w-]*)|)' \\\n          r'(?P<filetype>(?:.tif|.nc|))$'\n\nceos_ers = r'(?P<product_id>(?:SAR|ASA)_(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_))_[012B][CP])' \\\n           r'(?P<processing_stage_flag>[A-Z])' \\\n           r'(?P<originator_ID>[A-Z\\-]{3})' \\\n           r'(?P<start_day>[0-9]{8})_' \\\n           r'(?P<start_time>[0-9]{6})_' \\\n           r'(?P<duration>[0-9]{8})' \\\n           r'(?P<phase>[0-9A-Z]{1})' \\\n           r'(?P<cycle>[0-9]{3})_' \\\n           r'(?P<relative_orbit>[0-9]{5})_' \\\n           r'(?P<absolute_orbit>[0-9]{5})_' \\\n           r'(?P<counter>[0-9]{4,})\\.' 
\\\n           r'(?P<satellite_ID>[EN][12])' \\\n           r'(?P<extension>(?:\\.zip|\\.tar\\.gz|\\.PS|))$'\n\nceos_psr1 = r'^LED-ALPSR' \\\n            r'(?P<sub>P|S)' \\\n            r'(?P<orbit>[0-9]{5})' \\\n            r'(?P<frame>[0-9]{4})-' \\\n            r'(?P<mode>[HWDPC])' \\\n            r'(?P<level>1\\.[015])' \\\n            r'(?P<proc>G|_)' \\\n            r'(?P<proj>[UPML_])' \\\n            r'(?P<orbit_dir>A|D)$'\n\nceos_psr2 = r'^LED-ALOS2' \\\n            r'(?P<orbit>[0-9]{5})' \\\n            r'(?P<frame>[0-9]{4})-' \\\n            r'(?P<date>[0-9]{6})-' \\\n            r'(?P<mode>SBS|UBS|UBD|HBS|HBD|HBQ|FBS|FBD|FBQ|WBS|WBD|WWS|WWD|VBS|VBD)' \\\n            r'(?P<look_dir>L|R)' \\\n            r'(?P<level>1\\.0|1\\.1|1\\.5|2\\.1|3\\.1)' \\\n            r'(?P<proc>[GR_])' \\\n            r'(?P<proj>[UPML_])' \\\n            r'(?P<orbit_dir>A|D)$'\n\neorc_psr = r'^PSR2-' \\\n           r'(?P<prodlevel>SLTR)_' \\\n           r'(?P<pathnr>RSP[0-9]{3})_' \\\n           r'(?P<date>[0-9]{8})' \\\n           r'(?P<mode>FBD|WBD)' \\\n           r'(?P<beam>[0-9]{2})' \\\n           r'(?P<orbit_dir>A|D)' \\\n           r'(?P<look_dir>L|R)_' \\\n           r'(?P<replay_id1>[0-9A-Z]{16})-' \\\n           r'(?P<replay_id2>[0-9A-Z]{5})_' \\\n           r'(?P<internal>[0-9]{3})_' \\\n           r'HDR$'\n\nesa = r'(?P<product_id>(?:SAR|ASA)_(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_))_[012B][CP])' \\\n      r'(?P<processing_stage_flag>[A-Z])' \\\n      r'(?P<originator_ID>[A-Z\\-]{3})' \\\n      r'(?P<start_day>[0-9]{8})_' \\\n      r'(?P<start_time>[0-9]{6})_' \\\n      r'(?P<duration>[0-9]{8})' \\\n      r'(?P<phase>[0-9A-Z]{1})' \\\n      r'(?P<cycle>[0-9]{3})_' \\\n      r'(?P<relative_orbit>[0-9]{5})_' \\\n      r'(?P<absolute_orbit>[0-9]{5})_' \\\n      r'(?P<counter>[0-9]{4,})\\.' 
\\\n      r'(?P<satellite_ID>[EN][12])'\n\nsafe = r'^(?P<sensor>S1[ABCD])_' \\\n       r'(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_' \\\n       r'(?P<product>SLC|GRD|OCN)' \\\n       r'(?P<resolution>F|H|M|_)_' \\\n       r'(?P<processingLevel>1|2)' \\\n       r'(?P<category>S|A)' \\\n       r'(?P<pols>SH|SV|DH|DV|VV|HH|HV|VH)_' \\\n       r'(?P<start>[0-9]{8}T[0-9]{6})_' \\\n       r'(?P<stop>[0-9]{8}T[0-9]{6})_' \\\n       r'(?P<orbitNumber>[0-9]{6})_' \\\n       r'(?P<dataTakeID>[0-9A-F]{6})_' \\\n       r'(?P<productIdentifier>[0-9A-F]{4})' \\\n       r'\\.SAFE$'\n\ntsx = r'^(?P<sat>T[DS]X1)_SAR__' \\\n      r'(?P<prod>SSC|MGD|GEC|EEC)_' \\\n      r'(?P<var>____|SE__|RE__|MON1|MON2|BTX1|BRX2)_' \\\n      r'(?P<mode>SM|SL|HS|HS300|ST|SC)_' \\\n      r'(?P<pols>[SDTQ])_' \\\n      r'(?:SRA|DRA)_' \\\n      r'(?P<start>[0-9]{8}T[0-9]{6})_' \\\n      r'(?P<stop>[0-9]{8}T[0-9]{6})(?:\\.xml|)$'\n\ntdm = r'^(?P<sat>T[D]M1)_SAR__' \\\n      r'(?P<prod>COS)_' \\\n      r'(?P<var>____|MONO|BIST|ALT1|ALT2)_' \\\n      r'(?P<mode>SM|SL|HS)_' \\\n      r'(?P<pols>[SDQ])_' \\\n      r'(?:SRA|DRA)_' \\\n      r'(?P<start>[0-9]{8}T[0-9]{6})_' \\\n      r'(?P<stop>[0-9]{8}T[0-9]{6})(?:\\.xml|)$'\n"
  },
  {
    "path": "pyroSAR/snap/__init__.py",
    "content": "from .util import geocode, noise_power\nfrom .auxil import gpt\n"
  },
  {
    "path": "pyroSAR/snap/auxil.py",
    "content": "###############################################################################\n# pyroSAR SNAP API tools\n\n# Copyright (c) 2017-2025, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nimport os\nimport re\nimport copy\nimport shutil\nimport traceback\nimport subprocess as sp\nfrom xml.dom import minidom\nimport xml.etree.ElementTree as ET\n\nfrom pyroSAR import identify\nfrom pyroSAR.examine import ExamineSnap\nfrom pyroSAR.ancillary import windows_fileprefix, multilook_factors, Lock\nfrom pyroSAR.auxdata import get_egm_lookup\n\nfrom spatialist import Vector, Raster, vectorize, rasterize, boundary, intersect, bbox\nfrom spatialist.auxil import gdal_translate, crsConvert\nfrom spatialist.ancillary import finder, run\n\nfrom osgeo import gdal\nfrom osgeo.gdalconst import GA_Update\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef parse_recipe(name):\n    \"\"\"\n    parse a SNAP recipe\n    \n    Parameters\n    ----------\n    name: str\n        the name of the recipe; current options:\n         * `blank`: a workflow without any nodes\n         * `geocode`: a basic workflow containing `Read`, `Apply-Orbit-File`,\n           `Calibration`, `Terrain-Flattening` and `Write` nodes\n\n    Returns\n    -------\n    Workflow\n        the parsed recipe\n    \n    Examples\n    --------\n    >>> from pyroSAR.snap.auxil import parse_recipe\n    >>> workflow = parse_recipe('base')\n    \"\"\"\n    name = name if name.endswith('.xml') else name + '.xml'\n    absname = 
os.path.join(os.path.dirname(os.path.realpath(__file__)), 'recipes', name)\n    return Workflow(absname)\n\n\ndef parse_node(name, use_existing=True):\n    \"\"\"\n    parse an XML node recipe. The XML representation and parameter default values are read from the docstring of an\n    individual node by calling `gpt <node> -h`. The result is then written to an XML text file under\n    `$HOME/.pyroSAR/snap/nodes` which is subsequently read for parsing instead of again calling `gpt`.\n    \n    Parameters\n    ----------\n    name: str\n        the name of the processing node, e.g. Terrain-Correction\n    use_existing: bool\n        use an existing XML text file or force reparsing the gpt docstring and overwriting the XML file?\n\n    Returns\n    -------\n    Node\n        the parsed node\n    \n    Examples\n    --------\n    >>> tnr = parse_node('ThermalNoiseRemoval')\n    >>> print(tnr.parameters)\n    {'selectedPolarisations': None, 'removeThermalNoise': 'true', 'reIntroduceThermalNoise': 'false'}\n    \"\"\"\n    snap = ExamineSnap()\n    version = snap.get_version('microwavetbx')\n    name = name if name.endswith('.xml') else name + '.xml'\n    operator = os.path.splitext(name)[0]\n    nodepath = os.path.join(os.path.expanduser('~'), '.pyrosar', 'snap', 'nodes')\n    abspath = os.path.join(nodepath, version)\n    os.makedirs(abspath, exist_ok=True)\n    absname = os.path.join(abspath, name)\n    \n    # remove all old XML files that were not stored in a version subdirectory\n    deprecated = finder(nodepath, ['*.xml'], recursive=False)\n    for item in deprecated:\n        os.remove(item)\n    \n    with Lock(absname):\n        if not os.path.isfile(absname) or not use_existing:\n            gpt = snap.gpt\n            \n            cmd = [gpt, operator, '-h']\n            \n            returncode, out, err = run(cmd=cmd, void=False)\n            \n            if re.search('Unknown operator', out + err):\n                raise RuntimeError(\"unknown operator 
'{}'\".format(operator))\n            \n            graph = re.search('<graph id.*', out, flags=re.DOTALL).group()\n            # remove placeholder values like ${value}\n            graph = re.sub(r'>\\${.*', '/>', graph)\n            # remove <.../> placeholders\n            graph = re.sub(r'<\\.\\.\\./>.*', '', graph)\n            if operator == 'BandMaths':\n                graph = graph.replace('sourceProducts', 'sourceProduct')\n            tree = ET.fromstring(graph)\n            for elt in tree.iter():\n                if elt.text in ['string', 'double', 'integer', 'float']:\n                    elt.text = None\n            node = tree.find('node')\n            node.attrib['id'] = operator\n            # add a second source product entry for multi-source nodes\n            # multi-source nodes are those with an entry 'sourceProducts'\n            # instead of 'sourceProduct'\n            # exceptions are registered in this list:\n            multisource = ['Back-Geocoding']\n            if operator != 'Read' and operator != 'ProductSet-Reader':\n                source = node.find('.//sources')\n                child = source[0]\n                if child.tag == 'sourceProducts' or operator in multisource:\n                    child2 = ET.SubElement(source,\n                                           'sourceProduct.1',\n                                           {'refid': 'Read (2)'})\n                child.tag = 'sourceProduct'\n                child.attrib['refid'] = 'Read'\n                child.text = None\n            \n            # cleanup the BandMaths node\n            if operator == 'BandMaths':\n                tband = tree.find('.//targetBand')\n                allowed = ['name', 'type', 'expression',\n                           'description', 'unit', 'noDataValue']\n                invalid = [x.tag for x in tband if x.tag not in allowed]\n                for tag in invalid:\n                    el = tband.find(f'.//{tag}')\n                    
tband.remove(el)\n                for item in ['targetBands', 'variables']:\n                    elem = tree.find(f'.//{item}')\n                    pl = elem.find('.//_.002e..')\n                    elem.remove(pl)\n            \n            # add a class parameter and create the Node object\n            value = 'com.bc.ceres.binding.dom.XppDomElement'\n            tree.find('.//parameters').set('class', value)\n            node = Node(node)\n            \n            # read the default values from the parameter documentation\n            parameters = node.parameters.keys()\n            out += '-P'\n            for parameter in parameters:\n                p1 = r'-P{}.*?-P'.format(parameter)\n                p2 = r\"Default\\ value\\ is '([a-zA-Z0-9 ._\\(\\)]+)'\"\n                r1 = re.search(p1, out, re.S)\n                if r1:\n                    sub = r1.group()\n                    r2 = re.search(p2, sub)\n                    if r2:\n                        value = r2.groups()[0]\n                        node.parameters[parameter] = value\n                        continue\n                node.parameters[parameter] = None\n            \n            # fill in some additional defaults\n            if operator == 'BandMerge':\n                node.parameters['geographicError'] = '1.0E-5'\n            \n            with open(absname, 'w') as xml:\n                xml.write(str(node))\n            return node\n        else:\n            with open(absname, 'r') as workflow:\n                element = ET.fromstring(workflow.read())\n            return Node(element)\n\n\ndef execute(xmlfile, cleanup=True, gpt_exceptions=None, gpt_args=None):\n    \"\"\"\n    execute SNAP workflows via the Graph Processing Tool GPT.\n    This function merely calls gpt with some additional command\n    line arguments and raises a RuntimeError on fail. 
This\n    function is used internally by function :func:`gpt`.\n    \n    Parameters\n    ----------\n    xmlfile: str\n        the name of the workflow XML file\n    cleanup: bool\n        should all files written to the temporary directory during function execution be deleted after processing?\n    gpt_exceptions: dict\n        a dictionary to override the configured GPT executable for certain operators;\n        each (sub-)workflow containing this operator will be executed with the define executable;\n        \n         - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``\n    gpt_args: list or None\n        a list of additional arguments to be passed to the GPT call\n        \n        - e.g. ``['-x', '-c', '2048M']`` for increased tile cache size and intermediate clearing\n    \n    Returns\n    -------\n    \n    Raises\n    ------\n    RuntimeError\n    \"\"\"\n    # read the file and extract some information\n    workflow = Workflow(xmlfile)\n    write = workflow['Write']\n    outname = write.parameters['file']\n    workers = [x.id for x in workflow if x.operator not in ['Read', 'Write']]\n    message = ' -> '.join(workers)\n    gpt_exec = None\n    if gpt_exceptions is not None:\n        for item, exec in gpt_exceptions.items():\n            if item in workers:\n                gpt_exec = exec\n                message += ' (using {})'.format(exec)\n                break\n    log.info(message)\n    # try to find the GPT executable\n    if gpt_exec is None:\n        try:\n            gpt_exec = ExamineSnap().gpt\n        except AttributeError:\n            raise RuntimeError('could not find SNAP GPT executable')\n    # create the list of arguments to be passed to the subprocess module calling GPT\n    cmd = [gpt_exec, '-e']\n    if isinstance(gpt_args, list):\n        cmd.extend(gpt_args)\n    if format == 'GeoTiff-BigTIFF':\n        cmd.extend([\n            # '-Dsnap.dataio.reader.tileWidth=*',\n            # '-Dsnap.dataio.reader.tileHeight=1',\n   
         '-Dsnap.dataio.bigtiff.tiling.width=256',\n            '-Dsnap.dataio.bigtiff.tiling.height=256',\n            # '-Dsnap.dataio.bigtiff.compression.type=LZW',\n            # '-Dsnap.dataio.bigtiff.compression.quality=0.75'\n        ])\n    cmd.append(xmlfile)\n    # execute the workflow\n    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)\n    out, err = proc.communicate()\n    out = out.decode('utf-8') if isinstance(out, bytes) else out\n    err = err.decode('utf-8') if isinstance(err, bytes) else err\n    \n    # check for a message indicating an unknown parameter,\n    # which can easily be removed from the workflow\n    pattern = r\"Error: \\[NodeId: (?P<id>[a-zA-Z0-9-_]*)\\] \" \\\n              r\"Operator \\'[a-zA-Z0-9-_]*\\': \" \\\n              r\"Unknown element \\'(?P<par>[a-zA-Z]*)\\'\"\n    match = re.search(pattern, err)\n    \n    if proc.returncode == 0:\n        pattern = r'(?P<level>WARNING: )([a-zA-Z.]*: )(?P<message>No intersection.*)'\n        match = re.search(pattern, err)\n        if match is not None:\n            raise RuntimeError(re.search(pattern, err).group('message'))\n        return\n    \n    # delete unknown parameters and run the modified workflow\n    elif proc.returncode == 1 and match is not None:\n        replace = match.groupdict()\n        with Workflow(xmlfile) as flow:\n            log.info('  removing parameter {id}:{par} and executing modified workflow'.format(**replace))\n            node = flow[replace['id']]\n            del node.parameters[replace['par']]\n            flow.write(xmlfile)\n        execute(xmlfile, cleanup=cleanup, gpt_exceptions=gpt_exceptions,\n                gpt_args=gpt_args)\n    \n    # append additional information to the error message and raise an error\n    else:\n        if proc.returncode == -9:\n            submessage = '[{}] the process was killed by SNAP (process return code -9). 
def gpt(xmlfile, tmpdir, groups=None, cleanup=True,
        gpt_exceptions=None, gpt_args=None,
        removeS1BorderNoiseMethod='pyroSAR'):
    """
    Wrapper for ESA SNAP's Graph Processing Tool GPT.
    Input is a readily formatted workflow XML file as for example
    created by function :func:`~pyroSAR.snap.util.geocode`.
    Additional to calling GPT, this function will
    
    - (if processing Sentinel-1 GRD data with IPF version <2.9 and ``removeS1BorderNoiseMethod='pyroSAR'``)
      unpack the scene and perform the custom removal (:func:`pyroSAR.S1.removeGRDBorderNoise`).
    - if `groups` is not None:
    
      * split the workflow into sub-workflows (:func:`pyroSAR.snap.auxil.split`)
      * execute the sub-workflows (:func:`pyroSAR.snap.auxil.execute`)
    
    Note
    ----
    Depending on the parametrization this function might create two subdirectories in `tmpdir`,
    bnr for S1 GRD border noise removal and sub for sub-workflows and their intermediate outputs.
    Both are deleted if ``cleanup=True``. If `tmpdir` is empty afterward it is also deleted.
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file
    tmpdir: str
        a temporary directory for storing intermediate files
    groups: list[list[str]] or None
        a list of lists each containing IDs for individual nodes. If not None, the workflow is split into
        sub-workflows executing the nodes in the respective group. These workflows and their output products
        are stored into the subdirectory sub of `tmpdir`.
    cleanup: bool
        should all temporary files be deleted after processing? First, the subdirectories bnr and sub of `tmpdir`
        are deleted. If `tmpdir` is empty afterward it is also deleted.
    gpt_exceptions: dict or None
        a dictionary to override the configured GPT executable for certain operators;
        each (sub-)workflow containing this operator will be executed with the defined executable;
        
         - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``
    
    gpt_args: list[str] or None
        a list of additional arguments to be passed to the gpt call
        
         - e.g. ``['-x', '-c', '2048M']`` for increased tile cache size and intermediate clearing
    
    removeS1BorderNoiseMethod: str
        the border noise removal method to be applied, see :func:`pyroSAR.S1.removeGRDBorderNoise` for details;
        one of the following:
        
         - 'ESA': the pure implementation as described by ESA
         - 'pyroSAR': the ESA method plus the custom pyroSAR refinement. This is only applied if the IPF version is
           < 2.9 where additional noise removal was necessary. The output of the additional noise removal is stored
           in the subdirectory bnr of `tmpdir`.
    
    Returns
    -------
    
    Raises
    ------
    RuntimeError
    """
    workflow = Workflow(xmlfile)
    
    # identify the SAR scene from the (first) input product of the workflow
    if 'ProductSet-Reader' in workflow.operators:
        read = workflow['ProductSet-Reader']
        scene = identify(read.parameters['fileList'].split(',')[0])
    else:
        read = workflow['Read']
        scene = identify(read.parameters['file'])
    
    tmpdir_bnr = os.path.join(tmpdir, 'bnr')
    tmpdir_sub = os.path.join(tmpdir, 'sub')
    
    if 'Remove-GRD-Border-Noise' in workflow.ids \
            and removeS1BorderNoiseMethod == 'pyroSAR' \
            and scene.meta['IPF_version'] < 2.9:
        if 'SliceAssembly' in workflow.operators:
            raise RuntimeError("pyroSAR's custom border noise removal is not yet implemented for multiple scene inputs")
        os.makedirs(tmpdir_bnr, exist_ok=True)
        xmlfile = os.path.join(tmpdir_bnr,
                               os.path.basename(xmlfile.replace('_bnr', '')))
        # border noise removal is done outside of SNAP and the node is thus removed from the workflow
        del workflow['Remove-GRD-Border-Noise']
        # remove the node name from the groups; guarded against groups=None,
        # which previously raised "TypeError: object of type 'NoneType' has no len()"
        if groups is not None:
            i = 0
            while i < len(groups):
                if 'Remove-GRD-Border-Noise' in groups[i]:
                    del groups[i][groups[i].index('Remove-GRD-Border-Noise')]
                if len(groups[i]) == 0:
                    del groups[i]
                elif len(groups[i]) == 1 and groups[i][0] == 'Read':
                    # move Read into the next group if it is the only operator;
                    # if there is no next group (Read-only group was last), drop it
                    del groups[i]
                    if i < len(groups):
                        groups[i].insert(0, 'Read')
                else:
                    i += 1
        # unpack the scene if necessary and perform the custom border noise removal
        log.info('unpacking scene')
        if scene.compression is not None:
            scene.unpack(tmpdir_bnr)
        log.info('removing border noise..')
        scene.removeGRDBorderNoise(method=removeS1BorderNoiseMethod)
        # change the name of the input file to that of the unpacked archive
        read.parameters['file'] = scene.scene
        # write a new workflow file
        workflow.write(xmlfile)
    
    log.info('executing node sequence{}..'.format('s' if groups is not None else ''))
    try:
        if groups is not None:
            subs = split(xmlfile=xmlfile, groups=groups, outdir=tmpdir_sub)
            for sub in subs:
                execute(sub, cleanup=cleanup, gpt_exceptions=gpt_exceptions, gpt_args=gpt_args)
        else:
            execute(xmlfile, cleanup=cleanup, gpt_exceptions=gpt_exceptions, gpt_args=gpt_args)
    except Exception:
        tb = traceback.format_exc()
        log.info(tb)
        log.info('failed: {}'.format(xmlfile))
        raise
    finally:
        # temporary subdirectories are always removed on cleanup, even on failure;
        # tmpdir itself is only removed if it ended up empty
        if cleanup:
            for tmp in [tmpdir_bnr, tmpdir_sub]:
                if os.path.isdir(tmp):
                    shutil.rmtree(tmp, onerror=windows_fileprefix)
            if os.path.isdir(tmpdir) and not os.listdir(tmpdir):
                shutil.rmtree(tmpdir, onerror=windows_fileprefix)
def writer(xmlfile, outdir, basename_extensions=None,
           clean_edges=False, clean_edges_npixels=1):
    """
    SNAP product writing utility
    
    Converts the product(s) written by a workflow (ENVI or BEAM-DIMAP) to
    per-band GeoTIFF files in `outdir` and, for Sentinel-1 input, copies the
    scene's manifest.safe next to the output.
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file.
    outdir: str
        the directory into which to write the final files.
    basename_extensions: list of str or None
        names of additional parameters to append to the basename, e.g. ``['orbitNumber_rel']``.
    clean_edges: bool
        erode noisy image edges? See :func:`pyroSAR.snap.auxil.erode_edges`.
        Does not apply to layover-shadow mask.
    clean_edges_npixels: int
        the number of pixels to erode.

    Returns
    -------

    """
    workflow = Workflow(xmlfile)
    writers = workflow['operator=Write']
    # all Write nodes must point to the same output product
    files = list(set([x.parameters['file'] for x in writers]))
    if len(files) > 1:
        raise RuntimeError('Multiple output files are not yet supported.')
    else:
        src = files[0]
    src_format = writers[0].parameters['formatName']
    suffix = workflow.suffix()
    # was Terrain-Flattening applied? Determines the geometric correction label below.
    rtc = 'Terrain-Flattening' in workflow.operators
    # determine the DEM nodata value for naming/encoding the elevation band
    dem_name = workflow.tree.find('.//demName')
    dem_nodata = None
    if dem_name is not None:
        dem_name = dem_name.text
        if dem_name == 'External DEM':
            dem_nodata = float(workflow.tree.find('.//externalDEMNoDataValue').text)
        else:
            # only SRTM 1Sec HGT has a known nodata value here; other DEMs leave dem_nodata=None
            dem_nodata_lookup = {'SRTM 1Sec HGT': -32768}
            if dem_name in dem_nodata_lookup.keys():
                dem_nodata = dem_nodata_lookup[dem_name]
    
    src_base = os.path.splitext(os.path.basename(src))[0]
    outname_base = os.path.join(outdir, src_base)
    
    if src_format in ['ENVI', 'BEAM-DIMAP']:
        message = '{}converting to GeoTIFF'
        log.info(message.format('cleaning image edges and ' if clean_edges else ''))
        translateoptions = {'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'],
                            'format': 'GTiff'}
        if clean_edges:
            erode_edges(src=src, only_boundary=True, pixels=clean_edges_npixels)
        
        if src_format == 'BEAM-DIMAP':
            # the actual rasters of a BEAM-DIMAP product live in the .data directory
            src = src.replace('.dim', '.data')
        for item in finder(src, ['*.img'], recursive=False):
            # backscatter bands are named e.g. 'Gamma0_VV'; everything else is an ancillary layer
            pattern = '(?P<refarea>(?:Sig|Gam)ma0)_(?P<pol>[HV]{2})'
            basename = os.path.basename(item)
            match = re.search(pattern, basename)
            if match:
                refarea, pol = match.groups()
                # 'elp' = ellipsoid-corrected; 'rtc' = radiometrically terrain-corrected
                correction = 'elp'
                if rtc:
                    if refarea == 'Gamma0':
                        correction = 'rtc'
                    elif refarea == 'Sigma0':
                        # Sigma0 counts as RTC only if Terrain-Flattening also output Sigma0
                        tf = workflow['Terrain-Flattening']
                        if tf.parameters['outputSigma0']:
                            correction = 'rtc'
                suffix_new = '{0}-{1}'.format(refarea.lower(), correction)
                if 'dB' in suffix:
                    suffix_new += '_db'
                name_new = outname_base.replace(suffix, '{0}_{1}.tif'.format(pol, suffix_new))
            else:
                # normalize ancillary band names (DEM, scattering area, noise, masks)
                base = os.path.splitext(basename)[0] \
                    .replace('elevation', 'DEM')
                if re.search('scatteringArea', base):
                    base = re.sub('scatteringArea_[HV]{2}', 'scatteringArea', base)
                if re.search('gammaSigmaRatio', base):
                    base = re.sub('gammaSigmaRatio_[HV]{2}', 'gammaSigmaRatio', base)
                if re.search('NE[BGS]Z', base):
                    # swap e.g. 'NEBZ_VV' to 'VV_NEBZ'
                    base = re.sub('(NE[BGS]Z)_([HV]{2})', r'\g<2>_\g<1>', base)
                if re.search('layover_shadow_mask', base):
                    base = re.sub('layover_shadow_mask_[HV]{2}', 'layoverShadowMask', base)
                name_new = outname_base.replace(suffix, '{0}.tif'.format(base))
            # nodata: DEM value for elevation, 255 for the mask, 0 otherwise
            if re.search('elevation', basename):
                nodata = dem_nodata
            elif re.search('layoverShadowMask|layover_shadow_mask', basename):
                nodata = 255
            else:
                nodata = 0
            translateoptions['noData'] = nodata
            gdal_translate(src=item, dst=name_new, **translateoptions)
    else:
        raise RuntimeError('The output file format must be ENVI or BEAM-DIMAP.')
    ###########################################################################
    # write the Sentinel-1 manifest.safe file as addition to the actual product
    readers = workflow['operator=Read']
    for reader in readers:
        infile = reader.parameters['file']
        try:
            # NOTE: 'id' shadows the builtin; kept for byte-identical code
            id = identify(infile)
            if id.sensor in ['S1A', 'S1B', 'S1C', 'S1D']:
                manifest = id.getFileObj(id.findfiles('manifest.safe')[0])
                basename = id.outname_base(basename_extensions)
                basename = '{0}_manifest.safe'.format(basename)
                outname_manifest = os.path.join(outdir, basename)
                with open(outname_manifest, 'wb') as out:
                    out.write(manifest.read())
        except RuntimeError:
            # non-identifiable input products are silently skipped (best effort)
            continue
def is_consistent(workflow):
    """
    check whether all nodes take either no source node or one that is in the list
    
    Parameters
    ----------
    workflow: Workflow
        the workflow to be analyzed
    Returns
    -------
    bool
        is the list of nodes consistent?
    """
    known = workflow.ids
    results = []
    # 1) every node's source(s) must resolve to node IDs within the workflow
    for node in workflow:
        src = node.source
        if src is None or src in known:
            results.append(True)
        else:
            results.append(all(x in known for x in src))
    # 2) every node must eventually feed into a Write node
    for node in workflow:
        chain = workflow.successors(node.id, recursive=True)
        ops = [workflow[x].operator for x in chain]
        if node.operator == 'Write' or 'Write' in ops:
            results.append(True)
        else:
            log.debug('node {} does not have a Write successor'.format(node.id))
            results.append(False)
    return all(results)
def split(xmlfile, groups, outdir=None):
    """
    split a workflow file into groups and write them to separate workflows including source and write target linking.
    The new workflows are written to a sub-directory `temp` of the target directory defined in the input's `Write` node.
    Each new workflow is parameterized with a `Read` and `Write` node if they don't already exist. Temporary outputs are
    written to `BEAM-DIMAP` files named after the workflow suffix sequence.
    
    Parameters
    ----------
    xmlfile: str
        the workflow to be split
    groups: list
        a list of lists each containing IDs for individual nodes
    outdir: str or None
        the directory into which to write the XML workflows and the intermediate files created by them.
        If None, the name will be created from the file name of the node with ID 'Write',
        which is treated as a directory, and a subdirectory 'tmp'.

    Returns
    -------
    list of str
        the names of the newly written temporary workflows
    
    Raises
    ------
    RuntimeError
    """
    workflow = Workflow(xmlfile)
    write = workflow['Write']
    if outdir is None:
        out = write.parameters['file']
        outdir = os.path.join(out, 'tmp')
    os.makedirs(outdir, exist_ok=True)
    
    # the temporary XML files
    outlist = []
    # the names and format of temporary products
    prod_tmp = {}
    prod_tmp_format = {}
    for position, group in enumerate(groups):
        # maps new (sub-workflow) node IDs to the IDs they had in the original workflow
        node_lookup = {}
        log.debug('creating new workflow for group {}'.format(group))
        new = parse_recipe('blank')
        nodes = [workflow[x] for x in group]
        for node in nodes:
            id_old = node.id
            sources = node.source
            # normalize sources to a list; resetSuccessorSource is True only for
            # a single (scalar) source
            if sources is None:
                sources = []
                resetSuccessorSource = False
            elif isinstance(sources, list):
                resetSuccessorSource = False
            else:
                resetSuccessorSource = True
                sources = [sources]
            reset = []
            for source in sources:
                if source not in group:
                    # source lives in an earlier group: insert a Read node pointing
                    # to that group's temporary product
                    read = new.insert_node(parse_node('Read'), void=False,
                                           resetSuccessorSource=resetSuccessorSource)
                    reset.append(read.id)
                    read.parameters['file'] = prod_tmp[source]
                    read.parameters['formatName'] = prod_tmp_format[source]
                    node_lookup[read.id] = source
                else:
                    reset.append(source)
            # NOTE(review): after the normalization above 'sources' is always a list,
            # so this branch is always taken and the 'else' below appears unreachable
            if isinstance(sources, list):
                # map old source IDs to the IDs of the newly inserted nodes
                sources_new_pos = [list(node_lookup.values()).index(x) for x in sources]
                sources_new = [list(node_lookup.keys())[x] for x in sources_new_pos]
                newnode = new.insert_node(node.copy(), before=sources_new, void=False,
                                          resetSuccessorSource=False)
            else:
                newnode = new.insert_node(node.copy(), void=False,
                                          resetSuccessorSource=False)
            node_lookup[newnode.id] = id_old
            
            if not resetSuccessorSource:
                newnode.source = reset
        
        # if possible, read the name of the SAR product for parsing names of temporary files
        # this was found necessary for SliceAssembly, which expects the names in a specific format
        products = [x.parameters['file'] for x in new['operator=Read']]
        try:
            # NOTE: 'id' shadows the builtin; kept for byte-identical code
            id = identify(products[0])
            filename = os.path.basename(id.scene)
        except (RuntimeError, OSError):
            filename = os.path.basename(products[0])
        basename = os.path.splitext(filename)[0]
        # strip any '_tmp<n>' tag from a previous split round
        basename = re.sub(r'_tmp[0-9]+', '', basename)
        
        # add a Write node to all dangling nodes
        counter = 0
        for node in new:
            # nodes whose successors (in the original workflow) live outside this group
            # need their output persisted for the next sub-workflow
            dependants = [x for x in workflow.successors(node.id) if not x.startswith('Write') and not x in group]
            if node.operator != 'Read' and len(dependants) > 0:
                write = parse_node('Write')
                new.insert_node(write, before=node.id, resetSuccessorSource=False)
                id = str(position) if counter == 0 else '{}-{}'.format(position, counter)
                tmp_out = os.path.join(outdir, '{}_tmp{}.dim'.format(basename, id))
                prod_tmp[node_lookup[node.id]] = tmp_out
                prod_tmp_format[node_lookup[node.id]] = 'BEAM-DIMAP'
                write.parameters['file'] = tmp_out
                write.parameters['formatName'] = 'BEAM-DIMAP'
                counter += 1
        if not is_consistent(new):
            message = 'inconsistent group:\n {}'.format(' -> '.join(group))
            raise RuntimeError(message)
        outname = os.path.join(outdir, '{}_tmp{}.xml'.format(basename, position))
        new.write(outname)
        outlist.append(outname)
    return outlist
def groupbyWorkers(xmlfile, n=2):
    """
    split a SNAP workflow into groups containing a maximum defined number of operators.
    
    Parameters
    ----------
    xmlfile: str
        the SNAP xml workflow
    n: int
        the maximum number of worker nodes in each group; Read, Write and BandSelect are excluded.

    Returns
    -------
    list[list[str]]
        a list of lists each containing the IDs of all nodes belonging to the groups including Read and Write nodes;
        this list can e.g. be passed to function :func:`split` to split the workflow into new sub-workflow files based
        on the newly created groups or directly to function :func:`gpt`, which will call :func:`split` internally.
    """
    workflow = Workflow(xmlfile)
    # worker nodes are everything except Read, Write and BandSelect
    workers_id = [x.id for x in workflow if x.operator not in ['Read', 'Write', 'BandSelect']]
    readers_id = [x.id for x in workflow['operator=Read']]
    writers_id = [x.id for x in workflow['operator=Write']]
    selects_id = [x.id for x in workflow['operator=BandSelect']]
    # chunk the workers into groups of at most n
    workers_groups = [workers_id[i:i + n] for i in range(0, len(workers_id), n)]
    
    # some nodes must be executed together with a preceding node. They are moved to the previous group.
    def move_group(operator):
        # NOTE(review): assumes the node's source is a scalar ID found in some group;
        # a list source or missing source group would raise here — confirm upstream guarantees
        i = 0
        while i < len(workers_groups):
            if workers_groups[i][0].startswith(operator):
                # get the group ID of the source node
                source = workflow[workers_groups[i][0]].source
                source_group_id = [source in x for x in workers_groups].index(True)
                # move the node to the source group
                workers_groups[source_group_id].append(workers_groups[i][0])
                del workers_groups[i][0]
            # delete the group if it is empty
            if len(workers_groups[i]) == 0:
                del workers_groups[i]
            else:
                i += 1
    
    for operator in ['ThermalNoiseRemoval', 'Warp']:
        move_group(operator)
    
    # append the BandSelect nodes to the group of their source nodes
    for item in selects_id:
        source = workflow[item].source
        for group in workers_groups:
            if source in group:
                group.insert(group.index(source) + 1, item)
    # attach Read/Write nodes to the groups of the workers they connect to
    nodes_groups = []
    for group in workers_groups:
        newgroup = []
        for worker in group:
            newgroup.append(worker)
            source = workflow[worker].source
            if not isinstance(source, list):
                source = [source]
            for item in source:
                if item in readers_id:
                    # append all Read nodes that are the worker's direct sources
                    newgroup.insert(newgroup.index(worker), item)
            for writer in writers_id:
                if workflow[writer].source == worker:
                    # append all Write nodes that directly have the worker as source
                    newgroup.append(writer)
        nodes_groups.append(newgroup)
    return nodes_groups
if not isinstance(source, list):\n                source = [source]\n            for item in source:\n                if item in readers_id:\n                    # append all Read nodes that are the worker's direct sources\n                    newgroup.insert(newgroup.index(worker), item)\n            for writer in writers_id:\n                if workflow[writer].source == worker:\n                    # append all Write nodes that directly have the worker as source\n                    newgroup.append(writer)\n        nodes_groups.append(newgroup)\n    return nodes_groups\n\n\nclass Workflow(object):\n    \"\"\"\n    Class for convenient handling of SNAP XML workflows\n    \n    Parameters\n    ----------\n    xmlfile: str\n        the workflow XML file\n    \"\"\"\n    \n    def __init__(self, xmlfile):\n        with open(xmlfile, 'r') as infile:\n            self.tree = ET.fromstring(infile.read())\n    \n    def __enter__(self):\n        return self\n    \n    def __exit__(self, exc_type, exc_val, exc_tb):\n        pass\n    \n    def __getitem__(self, item):\n        pattern = '(?P<key>[a-zA-Z-_]*)=(?P<value>[a-zA-Z-_]*)'\n        if isinstance(item, int):\n            return self.nodes()[item]\n        elif isinstance(item, str):\n            if re.search(pattern, item):\n                key, value = re.search(pattern, item).groups()\n                return [x for x in self if getattr(x, key) == value]\n            else:\n                try:\n                    return Node(self.tree.find('.//node[@id=\"{}\"]'.format(item)))\n                except TypeError:\n                    raise KeyError('unknown key: {}'.format(item))\n        else:\n            raise TypeError('item must be of type int or str')\n    \n    def __len__(self):\n        return len(self.tree.findall('node'))\n    \n    def __delitem__(self, key):\n        if not isinstance(key, str):\n            raise TypeError('key must be of type str')\n        element = 
self.tree.find('.//node[@id=\"{}\"]'.format(key))\n        node = Node(element)\n        source = node.source\n        successors = [x for x in self if x.source == key]\n        for node in successors:\n            node.source = source\n        self.tree.remove(element)\n    \n    def __str__(self):\n        self.__optimize_appearance()\n        rough_string = ET.tostring(self.tree, 'utf-8')\n        reparsed = minidom.parseString(rough_string)\n        return reparsed.toprettyxml(indent='\\t', newl='')\n    \n    def __iter__(self):\n        return iter(self.nodes())\n    \n    def successors(self, id, recursive=False):\n        \"\"\"\n        find the succeeding node(s) of a node\n        \n        Parameters\n        ----------\n        id: str\n            the ID of the node\n        recursive: bool\n            find successors recursively?\n\n        Returns\n        -------\n        list of str\n            the ID(s) of the successors\n        \"\"\"\n        if not isinstance(id, str):\n            raise TypeError(\"'id' must be of type 'str', is {}\".format(type(id)))\n        successors = []\n        for node in self:\n            if node.source == id or (isinstance(node.source, list) and id in node.source):\n                successors.append(node.id)\n        if recursive:\n            for item in successors:\n                new = self.successors(item, recursive=True)\n                successors.extend(new)\n            successors = list(set(successors))\n        return successors\n    \n    def __reset_successor_source(self, id):\n        \"\"\"\n        reset the sources of nodes to that of a newly inserted one\n        \n        Parameters\n        ----------\n        id: str\n            the ID of the newly inserted node\n\n        Returns\n        -------\n\n        \"\"\"\n        \n        def reset(id, source, excludes=None):\n            if isinstance(source, list):\n                for item in source:\n                    successors = 
    def __reset_successor_source(self, id):
        """
        reset the sources of nodes to that of a newly inserted one
        
        Parameters
        ----------
        id: str
            the ID of the newly inserted node

        Returns
        -------

        """
        
        def reset(id, source, excludes=None):
            # for a multi-source node, recurse per source; siblings that are
            # themselves successors of a source are excluded from rewiring
            if isinstance(source, list):
                for item in source:
                    successors = self.successors(item)
                    excludes = [x for x in successors if x in source]
                    reset(id, item, excludes)
            else:
                try:
                    # find the source nodes of the current node
                    if source is not None:
                        successors = self.successors(source)
                    else:
                        return  # nothing to reset
                    # delete the ID of the current node from the successors
                    if id in successors:
                        del successors[successors.index(id)]
                    if excludes is not None:
                        for item in excludes:
                            del successors[successors.index(item)]
                    # point each remaining successor at the new node instead of
                    # the old source (preserving list sources in place)
                    for successor in successors:
                        successor_source = self[successor].source
                        if isinstance(successor_source, list):
                            successor_source[successor_source.index(source)] = id
                            self[successor].source = successor_source
                        else:
                            self[successor].source = id
                except IndexError:
                    # case where no successor exists because the new node
                    # is the new last node in the graph
                    pass
                except RuntimeError:
                    # case where the successor node is of type Read
                    pass
        
        reset(id, self[id].source)
= 0\n        x = 5\n        for id in self.ids:\n            pres = layout.find('.//node[@id=\"{}\"]'.format(id))\n            y = 20. if counter % 2 == 0 else 160.\n            if pres is None:\n                pres = ET.SubElement(layout, 'node', {'id': id})\n                pos = ET.SubElement(pres, 'displayPosition',\n                                    {'x': \"{}\".format(x), 'y': \"{}\".format(y)})\n            else:\n                pres.find('displayPosition').attrib['x'] = \"{}\".format(x)\n                pres.find('displayPosition').attrib['y'] = \"{}\".format(y)\n            counter += 1\n            x += len(id) * 8\n    \n    @property\n    def ids(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the IDs of all nodes\n        \"\"\"\n        return [node.id for node in self]\n    \n    def index(self, node):\n        \"\"\"\n        \n        Parameters\n        ----------\n        node: Node\n            a node in the workflow\n\n        Returns\n        -------\n        int\n            the index position of the node in the workflow\n        \"\"\"\n        return list(self.tree).index(node.element)\n    \n    def insert_node(self, node, before=None, after=None, resetSuccessorSource=True, void=True):\n        \"\"\"\n        insert one or multiple node(s) into the workflow including setting the source to the predecessor\n        and setting the ID as source of the successor.\n        \n        Parameters\n        ----------\n        node: Node or list[Node]\n            the node(s) to be inserted\n        before: Node, str or list\n            a Node object; the ID(s) of the node(s) before the newly inserted node; a list of node IDs is intended for\n            nodes that require multiple sources, e.g. 
    def insert_node(self, node, before=None, after=None, resetSuccessorSource=True, void=True):
        """
        insert one or multiple node(s) into the workflow including setting the source to the predecessor
        and setting the ID as source of the successor.
        
        Parameters
        ----------
        node: Node or list[Node]
            the node(s) to be inserted
        before: Node, str or list
            a Node object; the ID(s) of the node(s) before the newly inserted node; a list of node IDs is intended for
            nodes that require multiple sources, e.g. sliceAssembly
        after: Node, str
            a Node object; the ID of the node after the newly inserted node
        resetSuccessorSource: bool
            reset the source of the successor node to the ID of the newly inserted node?
        void: bool
            if false, the function returns the node

        Returns
        -------
        Node or list[Node] or None
            the new node, a list of nodes, or None, depending on the `node` input and argument `void`
        """
        if isinstance(node, list):
            # insert the first node at the requested position, then chain the
            # remaining nodes one after another
            self.insert_node(node=node[0], before=before, after=after,
                             resetSuccessorSource=resetSuccessorSource, void=True)
            for i, item in enumerate(node[1:]):
                self.insert_node(node=item, before=node[i].id,
                                 resetSuccessorSource=resetSuccessorSource, void=True)
        else:
            # derive a unique ID from the operator name, e.g. 'Write', 'Write (2)'
            ncopies = [x.operator for x in self.nodes()].count(node.operator)
            if ncopies > 0:
                node.id = '{0} ({1})'.format(node.operator, ncopies + 1)
            else:
                node.id = node.operator
            
            if isinstance(before, Node):
                before = before.id
            if isinstance(after, Node):
                after = after.id
            
            # default: append after the current last node
            if before is None and after is None and len(self) > 0:
                before = self[len(self) - 1].id
            if before and not after:
                if isinstance(before, list):
                    # multiple predecessors: insert after the one appearing last in the tree
                    indices = [self.index(self[x]) for x in before]
                    predecessor = self[before[indices.index(max(indices))]]
                else:
                    predecessor = self[before]
                log.debug('inserting node {} after {}'.format(node.id, predecessor.id))
                position = self.index(predecessor) + 1
                self.tree.insert(position, node.element)
                newnode = Node(self.tree[position])
                ####################################################
                # set the source product for the new node
                if newnode.operator != 'Read':
                    newnode.source = before
                ####################################################
                # set the source product for the node after the new node
                if resetSuccessorSource:
                    self.__reset_successor_source(newnode.id)
            ########################################################
            elif after and not before:
                successor = self[after]
                log.debug('inserting node {} before {}'.format(node.id, successor.id))
                position = self.index(successor)
                self.tree.insert(position, node.element)
                newnode = Node(self.tree[position])
                ####################################################
                # set the source product for the new node
                if newnode.operator != 'Read':
                    # the new node inherits the successor's former source
                    source = successor.source
                    newnode.source = source
                ####################################################
                # set the source product for the node after the new node
                if resetSuccessorSource:
                    self[after].source = newnode.id
            else:
                # no anchor (empty workflow or both before and after given):
                # insert just before the trailing applicationData element
                log.debug('inserting node {}'.format(node.id))
                self.tree.insert(len(self.tree) - 1, node.element)
        if not void:
            return node
the unique operators in the workflow\n        \"\"\"\n        return sorted(list(set([node.operator for node in self])))\n    \n    def refresh_ids(self):\n        \"\"\"\n        Ensure unique IDs for all nodes. If two nodes with the same ID are found one is renamed to \"ID (2)\".\n        E.g. 2 x \"Write\" -> \"Write\", \"Write (2)\".\n        This method is no longer used and is just kept in case there is need for it in the future.\n        \n        Returns\n        -------\n\n        \"\"\"\n        counter = {}\n        for node in self:\n            operator = node.operator\n            if operator not in counter.keys():\n                counter[operator] = 1\n            else:\n                counter[operator] += 1\n            if counter[operator] > 1:\n                new = '{} ({})'.format(operator, counter[operator])\n            else:\n                new = operator\n            if node.id != new:\n                log.debug('renaming node {} to {}'.format(node.id, new))\n                node.id = new\n    \n    def set_par(self, key, value, exceptions=None):\n        \"\"\"\n        set a parameter for all nodes in the workflow\n        \n        Parameters\n        ----------\n        key: str\n            the parameter name\n        value: bool or int or float or str\n            the parameter value\n        exceptions: list\n            a list of node IDs whose parameters should not be changed\n\n        Returns\n        -------\n\n        \"\"\"\n        for node in self:\n            if exceptions is not None and node.id in exceptions:\n                continue\n            if key in node.parameters.keys():\n                node.parameters[key] = value2str(value)\n    \n    def suffix(self, stop=None):\n        \"\"\"\n        Get the SNAP operator suffix sequence\n        \n        Parameters\n        ----------\n        stop: str\n            the ID of the last workflow node\n        \n        Returns\n        -------\n        str\n            
a file suffix created from the order of which the nodes will be executed\n        \"\"\"\n        nodes = self.tree.findall('node')\n        names = [re.sub(r'[ ]*\\([0-9]+\\)', '', y.attrib['id']) for y in nodes]\n        names_unique = []\n        for name in names:\n            if name not in names_unique:\n                names_unique.append(name)\n            if name == stop:\n                break\n        config = ExamineSnap()\n        suffix = '_'.join(filter(None, [config.get_suffix(x) for x in names_unique]))\n        return suffix\n    \n    def write(self, outfile):\n        \"\"\"\n        write the workflow to an XML file\n        \n        Parameters\n        ----------\n        outfile: str\n            the name of the file to write\n\n        Returns\n        -------\n\n        \"\"\"\n        outfile = outfile if outfile.endswith('.xml') else outfile + '.xml'\n        log.debug('writing {}'.format(outfile))\n        with open(outfile, 'w') as out:\n            out.write(self.__str__())\n\n\nclass Node(object):\n    \"\"\"\n    class for handling of SNAP workflow processing nodes\n    \n    Parameters\n    ----------\n    element: ~xml.etree.ElementTree.Element\n        the node XML element\n    \"\"\"\n    \n    def __init__(self, element):\n        if not isinstance(element, ET.Element):\n            raise TypeError('element must be of type xml.etree.ElementTree.Element')\n        self.element = element\n    \n    def __repr__(self):\n        return \"pyroSAR Node object '{}'\".format(self.id)\n    \n    def __str__(self):\n        rough_string = ET.tostring(self.element, 'utf-8')\n        reparsed = minidom.parseString(rough_string)\n        return reparsed.toprettyxml(indent='\\t', newl='')\n    \n    def __set_source(self, key, value):\n        source = self.element.find('.//sources/{}'.format(key))\n        if source is None:\n            child = ET.SubElement(self.element.find('.//sources'),\n                                  key, {'refid': 
value})\n        else:\n            source.attrib['refid'] = value\n    \n    def copy(self):\n        \"\"\"\n        \n        Returns\n        -------\n        Node\n            a copy of the Node object\n        \"\"\"\n        return Node(copy.deepcopy(self.element))\n    \n    @property\n    def id(self):\n        \"\"\"\n        \n        Returns\n        -------\n        str\n            the node ID\n        \"\"\"\n        return self.element.attrib['id']\n    \n    @id.setter\n    def id(self, value):\n        self.element.attrib['id'] = value\n    \n    @property\n    def operator(self):\n        \"\"\"\n        \n        Returns\n        -------\n        str\n            the name of the node's processing operator\n        \"\"\"\n        return self.element.find('.//operator').text\n    \n    @property\n    def parameters(self):\n        \"\"\"\n        \n        Returns\n        -------\n        Par or Par_BandMath\n            the processing parameters of the node\n        \"\"\"\n        params = self.element.find('.//parameters')\n        if self.operator == 'BandMaths':\n            return Par_BandMath(operator=self.operator, element=params)\n        else:\n            return Par(operator=self.operator, element=params)\n    \n    @property\n    def source(self):\n        \"\"\"\n        \n        Returns\n        -------\n        str or list\n            the ID(s) of the source node(s)\n        \"\"\"\n        sources = []\n        elements = self.element.findall('.//sources/')\n        for element in elements:\n            if element.tag.startswith('sourceProduct'):\n                sources.append(element.attrib['refid'])\n        \n        if len(sources) == 0:\n            return None\n        elif len(sources) == 1:\n            return sources[0]\n        else:\n            return sources\n    \n    @source.setter\n    def source(self, value):\n        \"\"\"\n        reset the source of the node by ID\n        \n        Parameters\n        
----------\n        value: str or list\n            the ID(s) of the new source node(s)\n\n        Returns\n        -------\n        \n        Raises\n        ------\n        RuntimeError\n        \"\"\"\n        if isinstance(value, list) and len(value) == 1:\n            value = value[0]\n        log.debug('setting the source of node {} to {}'.format(self.id, value))\n        if isinstance(value, str):\n            if isinstance(self.source, list):\n                raise TypeError(\n                    'node {} has multiple sources, which must be reset using a list, not str'.format(self.id))\n            self.__set_source('sourceProduct', value)\n        elif isinstance(value, list):\n            key = 'sourceProduct'\n            for i, item in enumerate(value):\n                self.__set_source(key, item)\n                key = 'sourceProduct.{}'.format(i + 1)\n\n\nclass Par(object):\n    \"\"\"\n    class for handling processing node parameters\n    \n    Parameters\n    ----------\n    operator: str\n        the name of the SNAP Node operator\n    element: ~xml.etree.ElementTree.Element\n        the node parameter XML element\n    \"\"\"\n    \n    def __init__(self, operator, element):\n        self.operator = operator\n        self.__element = element\n    \n    def __delitem__(self, key):\n        par = self.__element.find('.//{}'.format(key))\n        self.__element.remove(par)\n    \n    def __getitem__(self, item):\n        \"\"\"\n        \n        Parameters\n        ----------\n        item\n\n        Returns\n        -------\n        str\n        \"\"\"\n        if item not in self.keys():\n            raise KeyError('key {} does not exist'.format(item))\n        return self.__element.find('.//{}'.format(item)).text\n    \n    def __setitem__(self, key, value):\n        if key not in self.keys():\n            raise KeyError(\"unknown key for node '{}': '{}'\".format(self.operator, key))\n        strval = value2str(value)\n        
self.__element.find('.//{}'.format(key)).text = strval\n    \n    def __repr__(self):\n        return str(self.dict())\n    \n    def dict(self):\n        \"\"\"\n        \n        Returns\n        -------\n        dict\n            the parameters as a dictionary\n        \"\"\"\n        return dict(self.items())\n    \n    def items(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the parameters as (key, value) as from :meth:`dict.items()`\n        \"\"\"\n        return list(zip(self.keys(), self.values()))\n    \n    def keys(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the parameter names as from :meth:`dict.keys()`\n        \"\"\"\n        return [x.tag for x in self.__element.findall('./')]\n    \n    def values(self):\n        \"\"\"\n        \n        Returns\n        -------\n        list\n            the parameter values as from :meth:`dict.values()`\n        \"\"\"\n        return [x.text for x in self.__element.findall('./')]\n\n\nclass Par_BandMath(Par):\n    \"\"\"\n    class for handling BandMaths node parameters\n\n    Parameters\n    ----------\n    element: ~xml.etree.ElementTree.Element\n        the node parameter XML element\n    \"\"\"\n    \n    def __init__(self, operator, element):\n        self.operator = operator\n        self.__element = element\n        super(Par_BandMath, self).__init__(operator, element)\n    \n    def __getitem__(self, item):\n        if item in ['variables', 'targetBands']:\n            out = []\n            for x in self.__element.findall('.//{}'.format(item[:-1])):\n                out.append(Par(self.operator, x))\n            return out\n        else:\n            raise ValueError(\"can only get items 'variables' and 'targetBands'\")\n    \n    def clear_variables(self):\n        \"\"\"\n        remove all `variables` elements from the node\n        \n        Returns\n        -------\n\n        \"\"\"\n        var = 
self.__element.find('.//variables')\n        for item in var:\n            var.remove(item)\n    \n    def add_equation(self):\n        \"\"\"\n        add an equation element to the node\n        \n        Returns\n        -------\n\n        \"\"\"\n        eqs = self.__element.find('.//targetBands')\n        eqlist = eqs.findall('.//targetBand')\n        eq1 = eqlist[0]\n        eq2 = copy.deepcopy(eq1)\n        for item in eq2:\n            item.text = None\n        eqs.insert(len(eqlist), eq2)\n\n\ndef value2str(value):\n    \"\"\"\n    format a parameter value to string to be inserted into a workflow\n    \n    Parameters\n    ----------\n    value: bool, int, float, list\n\n    Returns\n    -------\n    str\n        the string representation of the value\n    \"\"\"\n    if isinstance(value, bool):\n        strval = str(value).lower()\n    elif isinstance(value, list):\n        strval = ','.join(map(str, value))\n    elif value is None:\n        strval = value\n    else:\n        strval = str(value)\n    return strval\n\n\ndef erode_edges(src, only_boundary=False, connectedness=4, pixels=1):\n    \"\"\"\n    Erode noisy edge pixels in SNAP-processed images.\n    It was discovered that images contain border pixel artifacts after `Terrain-Correction`.\n    Likely this is coming from treating the value 0 as regular value instead of no data during resampling.\n    This function erodes these edge pixels using :func:`scipy.ndimage.binary_erosion`.\n    scipy is not a base dependency of pyroSAR and has to be installed separately.\n    \n    .. figure:: figures/snap_erode_edges.png\n        :align: center\n        \n        VV gamma0 RTC backscatter image visualizing the noisy border (left) and the cleaned result (right).\n        The area covers approx. 2.3 x 2.3 km². Pixel spacing is 20 m. 
connectedness 4, 1 pixel.\n    \n    Parameters\n    ----------\n    src: str\n        a processed SAR image in BEAM-DIMAP format (.dim), a single .img file (ENVI format) or a\n        directory with .img files. 0 is assumed as no data value.\n    only_boundary: bool\n        only erode edges at the image boundary (or also at data gaps caused by e.g. masking during Terrain-Flattening)?\n    connectedness: int\n        the number of pixel neighbors considered for the erosion. Either 4 or 8, translating to a\n        :func:`scipy.ndimage.generate_binary_structure` `connectivity` of 1 or 2, respectively.\n    pixels: int\n        the number of pixels to erode from the edges. Directly translates to `iterations` of\n        :func:`scipy.ndimage.iterate_structure`.\n    \n    Returns\n    -------\n\n    \"\"\"\n    images = None\n    if src.endswith('.dim'):\n        workdir = src.replace('.dim', '.data')\n    elif src.endswith('.img'):\n        images = [src]\n        workdir = None\n    elif os.path.isdir(src):\n        workdir = src\n    else:\n        raise RuntimeError(\"'src' must be either a file in BEAM-DIMAP format (extension '.dim'), \"\n                           \"an ENVI file with extension *.img, or a directory.\")\n    \n    if images is None:\n        images = [x for x in finder(workdir, ['*.img'], recursive=False)\n                  if 'layoverShadowMask' not in x]\n    if len(images) == 0:\n        raise RuntimeError(\"could not find any files with extension '.img'\")\n    \n    from scipy.ndimage import binary_erosion, generate_binary_structure, iterate_structure\n    \n    if connectedness == 4:\n        connectivity = 1\n    elif connectedness == 8:\n        connectivity = 2\n    else:\n        raise ValueError('connectedness must be either 4 or 8')\n    \n    structure = generate_binary_structure(rank=2, connectivity=connectivity)\n    if pixels > 1:\n        structure = iterate_structure(structure=structure, iterations=pixels)\n    \n    if workdir 
is not None:\n        fname_mask = os.path.join(workdir, 'datamask.tif')\n    else:\n        fname_mask = os.path.join(os.path.dirname(src), 'datamask.tif')\n    write_intermediates = False  # this is intended for debugging\n    \n    def erosion(src, dst, structure, only_boundary, write_intermediates=False):\n        with Raster(src) as ref:\n            array = ref.array()\n            if not os.path.isfile(dst):\n                mask = array != 0\n                # do not perform erosion if data only contains nodata (mask == 1)\n                if len(mask[mask == 1]) == 0:\n                    ref.write(outname=dst, array=mask, dtype='Byte',\n                              options=['COMPRESS=DEFLATE'])\n                    return array, mask\n                if write_intermediates:\n                    ref.write(dst.replace('.tif', '_init.tif'),\n                              array=mask, dtype='Byte',\n                              options=['COMPRESS=DEFLATE'])\n                if only_boundary:\n                    with vectorize(target=mask, reference=ref) as vec:\n                        with boundary(vec, expression=\"value=1\") as bounds:\n                            with rasterize(vectorobject=bounds, reference=ref, nodata=None) as new:\n                                mask = new.array()\n                                if write_intermediates:\n                                    vec.write(dst.replace('.tif', '_init_vectorized.gpkg'))\n                                    bounds.write(dst.replace('.tif', '_boundary_vectorized.gpkg'))\n                                    new.write(outname=dst.replace('.tif', '_boundary.tif'),\n                                              dtype='Byte', options=['COMPRESS=DEFLATE'])\n                mask = binary_erosion(input=mask, structure=structure)\n                ref.write(outname=dst, array=mask, dtype='Byte',\n                          options=['COMPRESS=DEFLATE'])\n            else:\n                with Raster(dst) 
as ras:\n                    mask = ras.array()\n        array[mask == 0] = 0\n        return array, mask\n    \n    # make sure a backscatter image is used for creating the mask\n    backscatter = [x for x in images if re.search('^(?:Sigma0_|Gamma0_|C11|C22)', os.path.basename(x))]\n    images.insert(0, images.pop(images.index(backscatter[0])))\n    \n    mask = None\n    for img in images:\n        if mask is None:\n            array, mask = erosion(src=img, dst=fname_mask,\n                                  structure=structure, only_boundary=only_boundary,\n                                  write_intermediates=write_intermediates)\n        else:\n            with Raster(img) as ras:\n                array = ras.array()\n            array[mask == 0] = 0\n        # do not apply mask if it only contains 1 (valid data)\n        if len(mask[mask == 0]) == 0:\n            break\n        \n        # ensure usage of ENVI driver for .img files\n        ras = gdal.OpenEx(\n            img,\n            gdal.OF_RASTER | gdal.OF_UPDATE,\n            allowed_drivers=[\"ENVI\"]\n        )\n        band = ras.GetRasterBand(1)\n        band.WriteArray(array)\n        band.FlushCache()\n        band = None\n        ras = None\n\n\ndef mli_parametrize(scene, spacing=None, rlks=None, azlks=None, **kwargs):\n    \"\"\"\n    Convenience function for parametrizing a `Multilook` node.\n    \n    Parameters\n    ----------\n    scene: pyroSAR.drivers.ID\n        The SAR scene to be processed\n    spacing: int or float or None\n        the target pixel spacing for automatic determination of looks using function\n        :func:`~pyroSAR.ancillary.multilook_factors`. Overridden by arguments `rlks` and `azlks` if they are not None.\n    rlks: int or None\n        the number of range looks\n    azlks: int or None\n        the number of azimuth looks\n    bands: list[str] or None\n        an optional list of bands names\n    kwargs\n        further keyword arguments for node parametrization. 
Known options:\n        \n         - grSquarePixel\n         - outputIntensity\n         - sourceBands\n    \n    Returns\n    -------\n    Node or None\n        either a `Node` object if multilooking is necessary (either `rlks` or `azlks` are greater than 1) or None.\n    \n    See Also\n    --------\n    pyroSAR.ancillary.multilook_factors\n    \"\"\"\n    try:\n        image_geometry = scene.meta['image_geometry']\n        incidence = scene.meta['incidence']\n    except KeyError:\n        msg = 'This function does not yet support {} products in {} format'\n        raise RuntimeError(msg.format(scene.sensor, scene.__class__.__name__))\n    \n    if rlks is None and azlks is None:\n        if spacing is None:\n            raise RuntimeError(\"either 'spacing' or 'rlks' and 'azlks' must set to numeric values\")\n        rlks, azlks = multilook_factors(source_rg=scene.spacing[0],\n                                        source_az=scene.spacing[1],\n                                        target=spacing,\n                                        geometry=image_geometry,\n                                        incidence=incidence)\n    if [rlks, azlks].count(None) > 0:\n        raise RuntimeError(\"'rlks' and 'azlks' must either both be integers or None\")\n    \n    if azlks > 1 or rlks > 1 or scene.sensor in ['ERS1', 'ERS2', 'ASAR']:\n        ml = parse_node('Multilook')\n        ml.parameters['nAzLooks'] = azlks\n        ml.parameters['nRgLooks'] = rlks\n        for key, val in kwargs.items():\n            ml.parameters[key] = val\n        return ml\n\n\ndef orb_parametrize(scene, formatName, allow_RES_OSV=True, url_option=1, **kwargs):\n    \"\"\"\n    convenience function for parametrizing an `Apply-Orbit-File`.\n    Required Sentinel-1 orbit files are directly downloaded.\n    \n    Parameters\n    ----------\n    scene: pyroSAR.drivers.ID\n        The SAR scene to be processed\n    workflow: Workflow\n        the SNAP workflow object\n    before: str\n        
the ID of the node after which the `Apply-Orbit-File` node will be inserted\n    formatName: str\n        the scene's data format\n    allow_RES_OSV: bool\n        (only applies to Sentinel-1) Also allow the less accurate RES orbit files to be used?\n    url_option: int\n        the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch`\n    kwargs\n        further keyword arguments for node parametrization. Known options:\n        \n         - continueOnFail\n         - polyDegree\n    \n    Returns\n    -------\n    Node\n        the Apply-Orbit-File node object\n    \"\"\"\n    orbitType = None\n    orbit_lookup = {'SENTINEL-1': 'Sentinel Precise (Auto Download)'}\n    if formatName in orbit_lookup:\n        orbitType = orbit_lookup[formatName]\n    if formatName == 'ENVISAT':  # ASAR, ERS1, ERS2\n        if scene.sensor == 'ASAR':\n            orbitType = 'DORIS Precise VOR (ENVISAT) (Auto Download)'\n        else:\n            # Another option for ERS is 'DELFT Precise (ENVISAT, ERS1&2) (Auto Download)'.\n            # Neither option is suitable for all products, and auto-selection can\n            # only happen once a downloader (similar to S1.auxil.OSV) is written.\n            orbitType = 'PRARE Precise (ERS1&2) (Auto Download)'\n    if orbitType is None:\n        raise RuntimeError(f'Could not determine orbit type for {formatName} format')\n    \n    if formatName == 'SENTINEL-1':\n        osv_type = ['POE', 'RES'] if allow_RES_OSV else 'POE'\n        match = scene.getOSV(osvType=osv_type, returnMatch=True, url_option=url_option)\n        if match is None and allow_RES_OSV:\n            scene.getOSV(osvType='RES', url_option=url_option)\n            orbitType = 'Sentinel Restituted (Auto Download)'\n    \n    orb = parse_node('Apply-Orbit-File')\n    orb.parameters['orbitType'] = orbitType\n    for key, val in kwargs.items():\n        orb.parameters[key] = val\n    return orb\n\n\ndef sub_parametrize(scene, geometry=None, offset=None, buffer=0.01, 
copyMetadata=True, **kwargs):\n    \"\"\"\n    convenience function for parametrizing a `Subset` node.\n    \n    Parameters\n    ----------\n    scene: pyroSAR.drivers.ID\n        The SAR scene to be processed\n    geometry: dict or spatialist.vector.Vector or str or None\n        A vector geometry for geographic subsetting (node parameter geoRegion):\n        \n         - :class:`~spatialist.vector.Vector`: a vector object in arbitrary CRS\n         - :class:`str`: a name of a file that can be read with :class:`~spatialist.vector.Vector` in arbitrary CRS\n         - :class:`dict`: a dictionary with keys `xmin`, `xmax`, `ymin`, `ymax` in EPSG:4326 coordinates\n    offset: tuple or None\n        a tuple with pixel coordinates as (left, right, top, bottom)\n    buffer: int or float\n        an additional buffer in degrees to add around the `geometry`\n    copyMetadata: bool\n        copy the metadata of the source product?\n    kwargs\n        further keyword arguments for node parametrization. 
Known options:\n        \n         - fullSwath\n         - referenceBand\n         - sourceBands\n         - subSamplingX\n         - subSamplingY\n         - tiePointGrids\n\n    Returns\n    -------\n    Node\n        the Subset node object\n    \"\"\"\n    subset = parse_node('Subset')\n    if geometry:\n        if isinstance(geometry, dict):\n            ext = geometry\n        else:\n            if isinstance(geometry, Vector):\n                shp = geometry.clone()\n            elif isinstance(geometry, str):\n                shp = Vector(geometry)\n            else:\n                raise TypeError(\"argument 'geometry' must be either a dictionary, a Vector object or a filename.\")\n            # reproject the geometry to WGS 84 latlon\n            shp.reproject(4326)\n            ext = shp.extent\n            shp.close()\n        # add an extra buffer\n        ext['xmin'] -= buffer\n        ext['ymin'] -= buffer\n        ext['xmax'] += buffer\n        ext['ymax'] += buffer\n        with bbox(ext, 4326) as bounds:\n            inter = intersect(scene.bbox(), bounds)\n            if not inter:\n                raise RuntimeError('no bounding box intersection between shapefile and scene')\n            inter.close()\n            wkt = bounds.convert2wkt()[0]\n        subset.parameters['region'] = [0, 0, scene.samples, scene.lines]\n        subset.parameters['geoRegion'] = wkt\n    #######################\n    # (optionally) configure Subset node for pixel offsets\n    elif offset and not geometry:\n        # left, right, top and bottom offset in pixels\n        l, r, t, b = offset\n        \n        subset_values = [l, t, scene.samples - l - r, scene.lines - t - b]\n        subset.parameters['region'] = subset_values\n        subset.parameters['geoRegion'] = ''\n    else:\n        raise RuntimeError(\"one of 'geometry' and 'offset' must be set\")\n    \n    subset.parameters['copyMetadata'] = copyMetadata\n    for key, val in kwargs.items():\n        
subset.parameters[key] = val\n    return subset\n\n\ndef geo_parametrize(spacing, t_srs, tc_method='Range-Doppler',\n                    sourceBands=None, demName='SRTM 1Sec HGT', externalDEMFile=None,\n                    externalDEMNoDataValue=None, externalDEMApplyEGM=True,\n                    alignToStandardGrid=False, standardGridAreaOrPoint='point',\n                    standardGridOriginX=0, standardGridOriginY=0,\n                    nodataValueAtSea=False, export_extra=None,\n                    demResamplingMethod='BILINEAR_INTERPOLATION',\n                    imgResamplingMethod='BILINEAR_INTERPOLATION',\n                    **kwargs):\n    \"\"\"\n    convenience function for parametrizing geocoding nodes.\n    \n    Parameters\n    ----------\n    workflow: Workflow\n        the SNAP workflow object\n    before: str\n        the ID of the node after which the terrain correction node will be inserted\n    tc_method: str\n        the terrain correction method. Supported options:\n        \n         - Range-Doppler (SNAP node `Terrain-Correction`)\n         - SAR simulation cross correlation\n           (SNAP nodes `SAR-Simulation`->`Cross-Correlation`->`Warp`->`Terrain-Correction`)\n    \n    sourceBands: List[str] or None\n        the image band names to geocode; default None: geocode all incoming bands.\n    spacing: int or float\n        The target pixel spacing in meters.\n    t_srs: int or str or osgeo.osr.SpatialReference\n        A target geographic reference system in WKT, EPSG, PROJ4 or OPENGIS format.\n        See function :func:`spatialist.auxil.crsConvert()` for details.\n    demName: str\n        The name of the auto-download DEM. Default is 'SRTM 1Sec HGT'. 
Is ignored when `externalDEMFile` is not None.\n        Supported options:\n        \n         - ACE2_5Min\n         - ACE30\n         - ASTER 1sec GDEM\n         - CDEM\n         - Copernicus 30m Global DEM\n         - Copernicus 90m Global DEM\n         - GETASSE30\n         - SRTM 1Sec Grid\n         - SRTM 1Sec HGT\n         - SRTM 3Sec\n    externalDEMFile: str or None, optional\n        The absolute path to an external DEM file. Default is None. Overrides `demName`.\n    externalDEMNoDataValue: int, float or None, optional\n        The no data value of the external DEM. If not specified (default) the function will try to read it from the\n        specified external DEM.\n    externalDEMApplyEGM: bool, optional\n        Apply Earth Gravitational Model to external DEM? Default is True.\n    alignToStandardGrid: bool\n        Align all processed images to a common grid?\n    standardGridAreaOrPoint: str\n        treat alignment coordinate as pixel center ('point', SNAP default) or upper left ('area').\n    standardGridOriginX: int or float\n        The x origin value for grid alignment\n    standardGridOriginY: int or float\n        The y origin value for grid alignment\n    nodataValueAtSea:bool\n        mask values over sea?\n    export_extra: list[str] or None\n        a list of ancillary layers to write. Supported options:\n        \n         - DEM\n         - latLon\n         - incidenceAngleFromEllipsoid\n         - layoverShadowMask\n         - localIncidenceAngle\n         - projectedLocalIncidenceAngle\n         - selectedSourceBand\n    demResamplingMethod: str\n        the DEM resampling method\n    imgResamplingMethod: str\n        the image resampling method\n    kwargs\n        further keyword arguments for node parametrization. 
Known options:\n        \n         - outputComplex\n         - applyRadiometricNormalization\n         - saveSigmaNought\n         - saveGammaNought\n         - saveBetaNought\n         - incidenceAngleForSigma0\n         - incidenceAngleForGamma0\n         - auxFile\n         - externalAuxFile\n         - openShiftsFile (SAR simulation cross correlation only)\n         - openResidualsFile (SAR simulation cross correlation only)\n    \n    Returns\n    -------\n    Node or list[Node]\n        the Terrain-Correction node object or a list containing the objects for SAR-Simulation,\n        Cross-Correlation and SARSim-Terrain-Correction.\n    \"\"\"\n    tc = parse_node('Terrain-Correction')\n    tc.parameters['nodataValueAtSea'] = nodataValueAtSea\n    \n    if tc_method == 'Range-Doppler':\n        tc.parameters['sourceBands'] = sourceBands\n        sarsim = None\n        out = tc\n        dem_nodes = [tc]\n    elif tc_method == 'SAR simulation cross correlation':\n        sarsim = parse_node('SAR-Simulation')\n        sarsim.parameters['sourceBands'] = sourceBands\n        cc = parse_node('Cross-Correlation')\n        cc.parameters['coarseRegistrationWindowWidth'] = 64\n        cc.parameters['coarseRegistrationWindowHeight'] = 64\n        cc.parameters['maxIteration'] = 2\n        cc.parameters['onlyGCPsOnLand'] = True\n        warp = parse_node('Warp')\n        dem_nodes = [sarsim, tc]\n        out = [sarsim, cc, warp, tc]\n    else:\n        raise RuntimeError(f'tc_method not recognized: \"{tc_method}\"')\n    \n    tc.parameters['imgResamplingMethod'] = imgResamplingMethod\n    \n    if standardGridAreaOrPoint == 'area':\n        standardGridOriginX -= spacing / 2\n        standardGridOriginY += spacing / 2\n    tc.parameters['alignToStandardGrid'] = alignToStandardGrid\n    tc.parameters['standardGridOriginX'] = standardGridOriginX\n    tc.parameters['standardGridOriginY'] = standardGridOriginY\n    \n    # specify spatial resolution and coordinate reference 
system of the output dataset\n    tc.parameters['pixelSpacingInMeter'] = spacing\n    \n    try:\n        # try to convert the CRS into EPSG code (for readability in the workflow XML)\n        t_srs = crsConvert(t_srs, 'epsg')\n    except TypeError:\n        raise RuntimeError(\"format of parameter 't_srs' not recognized\")\n    except RuntimeError:\n        # This error can occur when the CRS does not have a corresponding EPSG code.\n        # In this case the original CRS representation is written to the workflow.\n        pass\n    \n    # The EPSG code 4326 is not supported by SNAP and thus the WKT string has to be defined.\n    # In all other cases defining EPSG:{code} will do.\n    if t_srs == 4326:\n        t_srs = 'GEOGCS[\"WGS84(DD)\",' \\\n                'DATUM[\"WGS84\",' \\\n                'SPHEROID[\"WGS84\", 6378137.0, 298.257223563]],' \\\n                'PRIMEM[\"Greenwich\", 0.0],' \\\n                'UNIT[\"degree\", 0.017453292519943295],' \\\n                'AXIS[\"Geodetic longitude\", EAST],' \\\n                'AXIS[\"Geodetic latitude\", NORTH]]'\n    \n    if isinstance(t_srs, int):\n        t_srs = 'EPSG:{}'.format(t_srs)\n    \n    tc.parameters['mapProjection'] = t_srs\n    \n    export_extra_options = \\\n        ['DEM', 'latLon',\n         'incidenceAngleFromEllipsoid',\n         'layoverShadowMask',\n         'localIncidenceAngle',\n         'projectedLocalIncidenceAngle',\n         'selectedSourceBand']\n    if export_extra is not None:\n        for item in export_extra:\n            if item in export_extra_options:\n                key = f'save{item[0].upper()}{item[1:]}'\n                tc.parameters[key] = True\n    \n    for dem_node in dem_nodes:\n        dem_parametrize(node=dem_node, demName=demName,\n                        externalDEMFile=externalDEMFile,\n                        externalDEMNoDataValue=externalDEMNoDataValue,\n                        externalDEMApplyEGM=externalDEMApplyEGM,\n                        
demResamplingMethod=demResamplingMethod)\n    \n    for key, val in kwargs.items():\n        tc.parameters[key] = val\n    return out\n\n\ndef dem_parametrize(workflow=None, node=None, demName='SRTM 1Sec HGT', externalDEMFile=None,\n                    externalDEMNoDataValue=None, externalDEMApplyEGM=False,\n                    demResamplingMethod='BILINEAR_INTERPOLATION'):\n    \"\"\"\n    DEM parametrization for a full workflow or a single node. In the former case, all nodes with the\n    DEM-relevant parameters can be modified at once, e.g. `Terrain-Flattening` and `Terrain-Correction`.\n    \n    Parameters\n    ----------\n    workflow: Workflow or None\n        a SNAP workflow object\n    node: Node or None\n        a SNAP node object\n    demName: str\n        The name of the auto-download DEM. Default is 'SRTM 1Sec HGT'. Is ignored when `externalDEMFile` is not None.\n        Supported options:\n        \n         - ACE2_5Min\n         - ACE30\n         - ASTER 1sec GDEM\n         - CDEM\n         - Copernicus 30m Global DEM\n         - Copernicus 90m Global DEM\n         - GETASSE30\n         - SRTM 1Sec Grid\n         - SRTM 1Sec HGT\n         - SRTM 3Sec\n    externalDEMFile: str or None, optional\n        The absolute path to an external DEM file. Default is None. Overrides `demName`.\n    externalDEMNoDataValue: int, float or None, optional\n        The no data value of the external DEM. If not specified (default) the function will try to read it from the\n        specified external DEM.\n    externalDEMApplyEGM: bool, optional\n        Apply Earth Gravitational Model to external DEM? 
Default is False.\n    demResamplingMethod: str\n        the DEM resampling method\n\n    Returns\n    -------\n\n    \"\"\"\n    # select DEM type\n    dempar = {'externalDEMFile': externalDEMFile,\n              'externalDEMApplyEGM': externalDEMApplyEGM,\n              'demResamplingMethod': demResamplingMethod}\n    if externalDEMFile is not None:\n        if os.path.isfile(externalDEMFile):\n            if externalDEMNoDataValue is None:\n                with Raster(externalDEMFile) as dem:\n                    dempar['externalDEMNoDataValue'] = dem.nodata\n                if dempar['externalDEMNoDataValue'] is None:\n                    raise RuntimeError('Cannot read NoData value from DEM file. '\n                                       'Please specify externalDEMNoDataValue')\n            else:\n                dempar['externalDEMNoDataValue'] = externalDEMNoDataValue\n            dempar['reGridMethod'] = False\n        else:\n            raise RuntimeError('specified externalDEMFile does not exist')\n        dempar['demName'] = 'External DEM'\n    else:\n        dempar['demName'] = demName\n        dempar['externalDEMFile'] = None\n        dempar['externalDEMNoDataValue'] = 0\n    \n    if workflow is not None:\n        for key, value in dempar.items():\n            workflow.set_par(key, value)\n    elif node is not None:\n        for key, value in dempar.items():\n            if key in node.parameters.keys():\n                node.parameters[key] = value\n    else:\n        raise RuntimeError(\"either 'workflow' or 'node' must be defined\")\n    \n    # download the EGM lookup table if necessary\n    if dempar['externalDEMApplyEGM']:\n        get_egm_lookup(geoid='EGM96', software='SNAP')\n"
  },
  {
    "path": "pyroSAR/snap/data/collect_suffices.py",
    "content": "##############################################################\n# SNAP source code scan for retrieving operator suffices\n\n# Copyright (c) 2020-2024, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n##############################################################\nimport os\nimport re\nimport subprocess as sp\nfrom spatialist.ancillary import finder\n\n\"\"\"\nThis script clones the SNAP source code from GitHub and reads the suffices for SNAP operators.\nE.g. The operator Terrain-Flattening has a suffix TF. If Terrain-Flattening is added to a workflow\nin SNAP's graph builder, this suffix is appended to the automatically created output file name.\nAs pyroSAR also automatically creates file names with processing step suffices, it is convenient to just\nuse those defined by SNAP.\nCurrently I am not aware of any way to retrieve them directly from a SNAP installation.\nA question in the STEP forum is asked: https://forum.step.esa.int/t/snappy-get-operator-product-suffix/22885\n\nFeel free to contact me if you have ideas on how to improve this!\n\"\"\"\n\n\ndef main():\n    # some arbitrary directory for the source code\n    workdir = os.path.join(os.path.expanduser('~'), '.pyrosar', 'snap_code')\n    os.makedirs(workdir, exist_ok=True)\n    \n    # the name of the Java properties file containing the operator-suffix lookup\n    outfile = 'snap.suffices.properties'\n    \n    # clone all relevant toolboxes\n    for tbx in ['snap-engine', 'snap-desktop', 'microwave-toolbox']:\n        print(tbx)\n        target = os.path.join(workdir, tbx)\n        if not 
os.path.isdir(target):\n            url = 'https://github.com/senbox-org/{}'.format(tbx)\n            sp.check_call(['git', 'clone', '--depth', '1', url], cwd=workdir)\n        else:\n            sp.check_call(['git', 'pull', '--depth', '1'], cwd=target)\n    \n    # search patterns for relevant files\n    # Usually files containing operator classes are named <operator>Op.java but without dashes\n    # e.g. TerrainFlatteningOp.java for the Terrain-Flattening operator\n    # One exception is Calibration for which there is a subclass for each SAR sensor\n    operators = finder(workdir, ['*Op.java', 'BaseCalibrator.java'])\n    \n    # a list for collection the suffices\n    collect = []\n    \n    for op in operators:\n        print(op)\n        with open(op, encoding='utf8') as infile:\n            content = infile.read()\n        \n        # the suffix is defined as a class attribute PRODUCT_SUFFIX\n        pattern = 'String PRODUCT_SUFFIX = \\\"_([a-zA-Z]*)\\\"'\n        match = re.search(pattern, content)\n        if match:\n            suffix = match.groups()[0]\n        else:\n            suffix = ''\n        \n        # the name of the operator as available in the UI\n        pattern = r'\\@OperatorMetadata\\(alias = \\\"([a-zA-Z-]*)\\\"'\n        match = re.search(pattern, content)\n        if match:\n            alias = match.groups()[0]\n        else:\n            alias = None\n        \n        if suffix == 'Cal':\n            alias = 'Calibration'\n        \n        # only collect operators for which an alias exists, i.e. which are exposed in the UI,\n        # and for which a suffix is defined. 
In the UI, all operators for which no suffix exists\n        # will just get no suffix in any written file.\n        if alias is not None and suffix != '':\n            print(alias, suffix)\n            collect.append('{0}={1}'.format(alias, suffix))\n    \n    print('found {} matching operators'.format(len(collect)))\n    \n    with open(outfile, 'w') as out:\n        out.write('\\n'.join(sorted(collect, key=str.lower)))\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "pyroSAR/snap/data/snap.auxdata.properties",
    "content": "####################################################################################\n# Configuration for the Auxdata paths\n# All properties described here can also be passed to the VM as system\n# properties using the standard Java\n#   -D<property>=<value>\n# syntax. In addition, it is possible to use macros of the form\n#   ${<property>}\n# within a value. Macros will expand to the value of the referred file property,\n# system property, or environment variable.\n####################################################################################\n\n#AuxDataPath = c:\\\\AuxData2\ndemPath = ${AuxDataPath}/dem\n\nDEM.aceDEMDataPath = ${demPath}/ACE_DEM/ACE\nDEM.aceDEM_HTTP = http://step.esa.int/auxdata/dem/ACE30/\nDEM.ace2_5MinDEMDataPath = ${demPath}/ACE2/5M_HEIGHTS\nDEM.ace2_5MinDEM_HTTP = http://step.esa.int/auxdata/dem/ACE2/5M/\nDEM.CDEM_HTTP = http://step.esa.int/auxdata/dem/cdem/\nDEM.egm96_HTTP = http://step.esa.int/auxdata/dem/egm96/\nDEM.gtopo30DEMDataPath = ${demPath}/GTOPO30/dem\nDEM.AsterDEMDataPath = ${demPath}/ASTER\nDEM.Getasse30DEMDataPath = ${demPath}/GETASSE30\nDEM.srtm3GeoTiffDEMDataPath = ${demPath}/SRTM_DEM/tiff\nDEM.srtm3GeoTiffDEM_FTP = xftp.jrc.it\nDEM.srtm3GeoTiffDEM_remotePath = /pub/srtmV4/tiff/\nDEM.srtm3GeoTiffDEM_HTTP = https://download.esa.int/step/auxdata/dem/SRTM90/tiff/\nDEM.srtm1HgtDEM_HTTP = http://step.esa.int/auxdata/dem/SRTMGL1/\nDEM.srtm1GridDEMDataPath =\n\nlandCoverPath = ${AuxDataPath}/LandCover\nLandCover.glc2000DataPath = ${landCoverPath}/glc2000\nLandCover.globcoverDataPath = ${landCoverPath}/globcover\n\nOrbitFiles.dorisHTTP_vor_remotePath = http://step.esa.int/auxdata/orbits/Doris/vor\nOrbitFiles.dorisVOROrbitPath = ${AuxDataPath}/Orbits/Doris/vor\nOrbitFiles.dorisPOROrbitPath = ${AuxDataPath}/Orbits/Doris/por\n\nOrbitFiles.delftEnvisatOrbitPath = ${AuxDataPath}/Orbits/Delft Precise Orbits/ODR.ENVISAT1/eigen-cg03c\nOrbitFiles.delftERS1OrbitPath = ${AuxDataPath}/Orbits/Delft Precise 
Orbits/ODR.ERS-1/dgm-e04\nOrbitFiles.delftERS2OrbitPath = ${AuxDataPath}/Orbits/Delft Precise Orbits/ODR.ERS-2/dgm-e04\n\nOrbitFiles.delftFTP = dutlru2.lr.tudelft.nl\nOrbitFiles.delftFTP_ENVISAT_precise_remotePath = /pub/orbits/ODR.ENVISAT1/eigen-cg03c/\nOrbitFiles.delftFTP_ERS1_precise_remotePath = /pub/orbits/ODR.ERS-1/dgm-e04/\nOrbitFiles.delftFTP_ERS2_precise_remotePath = /pub/orbits/ODR.ERS-2/dgm-e04/\n\nOrbitFiles.prareHTTP_ERS1_remotePath = http://step.esa.int/auxdata/orbits/ers_precise_orb/ERS1\nOrbitFiles.prareHTTP_ERS2_remotePath = http://step.esa.int/auxdata/orbits/ers_precise_orb/ERS2\nOrbitFiles.prareERS1OrbitPath = ${AuxDataPath}/Orbits/ers_precise_orb/ERS1\nOrbitFiles.prareERS2OrbitPath = ${AuxDataPath}/Orbits/ers_precise_orb/ERS2\n\nOrbitFiles.sentinel1POEOrbitPath = ${AuxDataPath}/Orbits/Sentinel-1/POEORB\nOrbitFiles.sentinel1RESOrbitPath = ${AuxDataPath}/Orbits/Sentinel-1/RESORB\nOrbitFiles.sentinel1POEOrbit_remotePath = http://step.esa.int/auxdata/orbits/Sentinel-1/POEORB/\nOrbitFiles.sentinel1RESOrbit_remotePath = http://step.esa.int/auxdata/orbits/Sentinel-1/RESORB/\n\nAuxCal.Sentinel1.remotePath = http://step.esa.int/auxdata/auxcal/S1/\nAuxCal.ENVISAT.remotePath = http://step.esa.int/auxdata/auxcal/ENVISAT/\nAuxCal.ERS.remotePath = http://step.esa.int/auxdata/auxcal/ERS/\n"
  },
  {
    "path": "pyroSAR/snap/data/snap.suffices.properties",
    "content": "AdaptiveThresholding=THR\nALOS-Deskewing=DSk\nApply-Orbit-File=Orb\nBack-Geocoding=Stack\nCalibration=Cal\nCoherence=Coh\nCP-Stokes-Parameters=Stokes\nCreateStack=Stack\nDEM-Assisted-Coregistration=Stack\nDemodulate=Demod\nEllipsoid-Correction-GG=EC\nEllipsoid-Correction-RD=EC\nFlip=Flip\nGLCM=GLCM\nGoldsteinPhaseFiltering=Flt\nHorizontalVerticalMotion=hvm\nInterferogram=Ifg\nIonosphericCorrection=iono\nKNN-Classifier=KNNClass\nLinearToFromdB=dB\nMaximum-Likelihood-Classifier=MLClass\nMinimum-Distance-Classifier=MDClass\nMulti-Temporal-Speckle-Filter=Spk\nMultilook=ML\nMultiMasterInSAR=mmifg\nMultitemporal-Compositing=MC\nNaive-Bayes-Classifier=NB\nObject-Discrimination=SHP\nOffset-Tracking=Vel\nOversample=Ovr\nPhaseFilter=Flt\nPhaseToDisplacement=Disp\nPhaseToElevation=Hgt\nPhaseToHeight=Hgt\nPolarimetric-Classification=Class\nPolarimetric-Parameters=PP\nPolarimetric-Speckle-Filter=Spk\nRandom-Forest-Classifier=RF\nRemodulate=Remod\nSARSim-Terrain-Correction=TC\nSliceAssembly=Asm\nSM-Dielectric-Modeling=SM\nSpeckle-Divergence=SpkDiv\nSpeckle-Filter=Spk\nSRGR=SRGR\nSupervised-Wishart-Classification=Class\nSVM-Classifier=SVMClass\nTerrain-Correction=TC\nTerrain-Flattening=TF\nThermalNoiseRemoval=NR\nThree-passDInSAR=DInSAR\nTopoPhaseRemoval=DInSAR\nTOPSAR-Deburst=Deb\nTOPSAR-Merge=mrg\nUndersample=Udr"
  },
  {
    "path": "pyroSAR/snap/recipes/base.xml",
    "content": "<graph id=\"Graph\">\n    <version>1.0</version>\n    <node id=\"Read\">\n        <operator>Read</operator>\n        <sources/>\n        <parameters class=\"com.bc.ceres.binding.dom.XppDomElement\">\n            <file>S1A_IW_GRDH_1SDV_20141220T155633_20141220T155658_003805_0048BB_CE9B.zip</file>\n            <formatName>SENTINEL-1</formatName>\n        </parameters>\n    </node>\n    <node id=\"Apply-Orbit-File\">\n        <operator>Apply-Orbit-File</operator>\n        <sources>\n            <sourceProduct refid=\"Read\"/>\n        </sources>\n        <parameters class=\"com.bc.ceres.binding.dom.XppDomElement\">\n            <orbitType>Sentinel Precise (Auto Download)</orbitType>\n            <polyDegree>3</polyDegree>\n            <continueOnFail>false</continueOnFail>\n        </parameters>\n    </node>\n    <node id=\"Calibration\">\n        <operator>Calibration</operator>\n        <sources>\n            <sourceProduct refid=\"Apply-Orbit-File\"/>\n        </sources>\n        <parameters class=\"com.bc.ceres.binding.dom.XppDomElement\">\n            <sourceBands/>\n            <auxFile>Product Auxiliary File</auxFile>\n            <externalAuxFile/>\n            <outputImageInComplex>false</outputImageInComplex>\n            <outputImageScaleInDb>false</outputImageScaleInDb>\n            <createGammaBand>false</createGammaBand>\n            <createBetaBand>false</createBetaBand>\n            <selectedPolarisations>VH,VV</selectedPolarisations>\n            <outputSigmaBand>false</outputSigmaBand>\n            <outputGammaBand>false</outputGammaBand>\n            <outputBetaBand>false</outputBetaBand>\n        </parameters>\n    </node>\n    <node id=\"Write\">\n        <operator>Write</operator>\n        <sources>\n            <sourceProduct refid=\"Calibration\"/>\n        </sources>\n        <parameters class=\"com.bc.ceres.binding.dom.XppDomElement\">\n            
<file>E:\\DATA\\SWOS\\SNAP\\test\\output\\S1A_IW_GRDH_1SDV_20141005T052515_20141005T052540_002690_003012_763E.tif</file>\n            <formatName>GeoTIFF</formatName>\n        </parameters>\n    </node>\n    <applicationData id=\"Presentation\">\n        <Description/>\n        <node id=\"Read\">\n            <displayPosition x=\"13.0\" y=\"123.0\"/>\n        </node>\n        <node id=\"Apply-Orbit-File\">\n            <displayPosition x=\"239.0\" y=\"141.0\"/>\n        </node>\n        <node id=\"Calibration\">\n            <displayPosition x=\"340.0\" y=\"24.0\"/>\n        </node>\n        <node id=\"Terrain-Flattening\">\n            <displayPosition x=\"402.0\" y=\"159.0\"/>\n        </node>\n        <node id=\"Terrain-Correction\">\n            <displayPosition x=\"489.0\" y=\"33.0\"/>\n        </node>\n        <node id=\"Write\">\n            <displayPosition x=\"706.0\" y=\"118.0\"/>\n        </node>\n    </applicationData>\n</graph>\n"
  },
  {
    "path": "pyroSAR/snap/recipes/blank.xml",
    "content": "<graph id=\"Graph\">\n    <version>1.0</version>\n    <applicationData id=\"Presentation\">\n        <Description/>\n    </applicationData>\n</graph>\n"
  },
  {
    "path": "pyroSAR/snap/util.py",
    "content": "###############################################################################\n# Convenience functions for SAR image batch processing with ESA SNAP\n\n# Copyright (c) 2016-2025, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\nimport os\nimport re\nimport shutil\nimport traceback\nfrom ..drivers import identify, identify_many, ID\nfrom .auxil import parse_recipe, parse_node, gpt, groupbyWorkers, writer, \\\n    windows_fileprefix, orb_parametrize, geo_parametrize, sub_parametrize, \\\n    mli_parametrize, dem_parametrize\n\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef geocode(infile, outdir, t_srs=4326, spacing=20, polarizations='all', shapefile=None, scaling='dB',\n            geocoding_type='Range-Doppler', removeS1BorderNoise=True, removeS1BorderNoiseMethod='pyroSAR',\n            removeS1ThermalNoise=True, offset=None, allow_RES_OSV=False, demName='SRTM 1Sec HGT',\n            externalDEMFile=None, externalDEMNoDataValue=None, externalDEMApplyEGM=True, terrainFlattening=True,\n            basename_extensions=None, test=False, export_extra=None, groupsize=1, cleanup=True, tmpdir=None,\n            gpt_exceptions=None, gpt_args=None, returnWF=False, nodataValueAtSea=True,\n            demResamplingMethod='BILINEAR_INTERPOLATION', imgResamplingMethod='BILINEAR_INTERPOLATION',\n            alignToStandardGrid=False, standardGridOriginX=0, standardGridOriginY=0,\n            speckleFilter=False, refarea='gamma0', clean_edges=False, clean_edges_npixels=1,\n      
      rlks=None, azlks=None, dem_oversampling_multiple=2, s1_osv_url_option=1):\n    \"\"\"\n    general function for geocoding of SAR backscatter images with SNAP.\n    \n    This function performs the following steps:\n    \n    - (if necessary) identify the SAR scene(s) passed via argument `infile` (:func:`pyroSAR.drivers.identify`)\n    - (if necessary) create the directories defined via `outdir` and `tmpdir`\n    - (if necessary) download Sentinel-1 OSV files\n    - parse a SNAP workflow (:class:`pyroSAR.snap.auxil.Workflow`)\n    - write the workflow to an XML file in `outdir`\n    - execute the workflow (:func:`pyroSAR.snap.auxil.gpt`)\n\n    Note\n    ----\n    The function may create workflows with multiple `Write` nodes. All nodes are parametrized to write data in ENVI format,\n    in which case the node parameter `file` is going to be a directory. All nodes will use the same temporary directory,\n    which will be created in `tmpdir`.\n    Its name is created from the basename of the `infile` (:meth:`pyroSAR.drivers.ID.outname_base`)\n    and a suffix identifying each processing node of the workflow (:meth:`pyroSAR.snap.auxil.Workflow.suffix`).\n    \n    For example: `S1A__IW___A_20180101T170648_NR_Orb_Cal_ML_TF_TC`.\n    \n    Parameters\n    ----------\n    infile: str or ~pyroSAR.drivers.ID or list\n        The SAR scene(s) to be processed; multiple scenes are treated as consecutive acquisitions, which will be\n        mosaicked with SNAP's SliceAssembly operator.\n    outdir: str\n        The directory to write the final files to.\n    t_srs: int or str or osgeo.osr.SpatialReference\n        A target spatial reference system in WKT, EPSG, PROJ4 or OPENGIS format.\n        See function :func:`spatialist.auxil.crsConvert()` for details.\n        Default: `4326 <https://spatialreference.org/ref/epsg/4326/>`_.\n    spacing: int or float, optional\n        The target pixel spacing in meters. 
Default is 20\n    polarizations: list[str] or str\n        The polarizations to be processed; can be a string for a single polarization, e.g. 'VV', or a list of several\n        polarizations, e.g. ['VV', 'VH']. With the special value 'all' (default) all available polarizations are\n        processed.\n    shapefile: str or :class:`~spatialist.vector.Vector` or dict, optional\n        A vector geometry for spatial subsetting:\n        \n         - :class:`~spatialist.vector.Vector`: a vector object in arbitrary CRS\n         - :class:`str`: a name of a file that can be read with :class:`~spatialist.vector.Vector` in arbitrary CRS\n         - :class:`dict`: a dictionary with keys `xmin`, `xmax`, `ymin`, `ymax` in EPSG:4326 coordinates\n    scaling: {'dB', 'db', 'linear'}, optional\n        Should the output be in linear or decibel scaling? Default is 'dB'.\n    geocoding_type: {'Range-Doppler', 'SAR simulation cross correlation'}, optional\n        The type of geocoding applied; can be either 'Range-Doppler' (default) or 'SAR simulation cross correlation'\n    removeS1BorderNoise: bool, optional\n        Enables removal of S1 GRD border noise (default). Will be ignored if SLC scenes are processed.\n    removeS1BorderNoiseMethod: str, optional\n        The border noise removal method to be applied if `removeS1BorderNoise` is True.\n        See :func:`pyroSAR.S1.removeGRDBorderNoise` for details. One of the following:\n        \n         - 'ESA': the pure implementation as described by ESA\n         - 'pyroSAR': the ESA method plus the custom pyroSAR refinement (default)\n    removeS1ThermalNoise: bool, optional\n        Enables removal of S1 thermal noise (default).\n    offset: tuple, optional\n        A tuple defining offsets for left, right, top and bottom in pixels, e.g. (100, 100, 0, 0); this variable is\n        overridden if a shapefile is defined. 
Default is None.\n    allow_RES_OSV: bool\n        (only applies to Sentinel-1) Also allow the less accurate RES orbit files to be used?\n        The function first tries to download a POE file for the scene.\n        If this fails and RES files are allowed, it will download the RES file.\n        The selected OSV type is written to the workflow XML file.\n        Processing is aborted if the correction fails (Apply-Orbit-File parameter continueOnFail set to false).\n    demName: str\n        The name of the auto-download DEM. Default is 'SRTM 1Sec HGT'. Is ignored when `externalDEMFile` is not None.\n        Supported options:\n        \n         - ACE2_5Min\n         - ACE30\n         - ASTER 1sec GDEM\n         - CDEM\n         - Copernicus 30m Global DEM\n         - Copernicus 90m Global DEM\n         - GETASSE30\n         - SRTM 1Sec Grid\n         - SRTM 1Sec HGT\n         - SRTM 3Sec\n    externalDEMFile: str or None, optional\n        The absolute path to an external DEM file. Default is None. Overrides `demName`.\n    externalDEMNoDataValue: int, float or None, optional\n        The no data value of the external DEM. If not specified (default) the function will try to read it from the\n        specified external DEM.\n    externalDEMApplyEGM: bool, optional\n        Apply Earth Gravitational Model to external DEM? Default is True.\n    terrainFlattening: bool\n        Apply topographic normalization on the data?\n    basename_extensions: list of str or None\n        Names of additional parameters to append to the basename, e.g. ['orbitNumber_rel'].\n    test: bool, optional\n        If set to True the workflow xml file is only written and not executed. Default is False.\n    export_extra: list or None\n        A list of image file IDs to be exported to outdir. 
The following IDs are currently supported:\n        \n         - incidenceAngleFromEllipsoid\n         - localIncidenceAngle\n         - projectedLocalIncidenceAngle\n         - DEM\n         - layoverShadowMask\n         - scatteringArea (requires ``terrainFlattening=True``)\n         - gammaSigmaRatio (requires ``terrainFlattening=True`` and ``refarea=['sigma0', 'gamma0']``)\n    groupsize: int\n        The number of workers executed together in one gpt call.\n    cleanup: bool\n        Should all files written to the temporary directory during function execution be deleted after processing?\n        Default is True.\n    tmpdir: str or None\n        Path of custom temporary directory, useful to separate output folder and temp folder. If `None`, the `outdir`\n        location will be used. The created subdirectory will be deleted after processing if ``cleanup=True``.\n    gpt_exceptions: dict or None\n        A dictionary to override the configured GPT executable for certain operators;\n        each (sub-)workflow containing this operator will be executed with the define executable;\n        \n         - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``\n    gpt_args: list or None\n        A list of additional arguments to be passed to the gpt call.\n        \n        - e.g. ``['-x', '-c', '2048M']`` for increased tile cache size and intermediate clearing\n    returnWF: bool\n        Return the full name of the written workflow XML file?\n    nodataValueAtSea: bool\n        Mask pixels acquired over sea? 
The sea mask depends on the selected DEM.\n    demResamplingMethod: str\n        One of the following:\n        \n         - 'NEAREST_NEIGHBOUR'\n         - 'BILINEAR_INTERPOLATION'\n         - 'CUBIC_CONVOLUTION'\n         - 'BISINC_5_POINT_INTERPOLATION'\n         - 'BISINC_11_POINT_INTERPOLATION'\n         - 'BISINC_21_POINT_INTERPOLATION'\n         - 'BICUBIC_INTERPOLATION'\n    imgResamplingMethod: str\n        The resampling method for geocoding the SAR image; the options are identical to demResamplingMethod.\n    alignToStandardGrid: bool\n        Align all processed images to a common grid?\n    standardGridOriginX: int or float\n        The x origin value for grid alignment\n    standardGridOriginY: int or float\n        The y origin value for grid alignment\n    speckleFilter: str\n        One of the following:\n        \n         - 'Boxcar'\n         - 'Median'\n         - 'Frost'\n         - 'Gamma Map'\n         - 'Refined Lee'\n         - 'Lee'\n         - 'Lee Sigma'\n    refarea: str or list\n        'sigma0', 'gamma0' or a list of both\n    clean_edges: bool\n        erode noisy image edges? See :func:`pyroSAR.snap.auxil.erode_edges`.\n        Does not apply to layover-shadow mask.\n    clean_edges_npixels: int\n        the number of pixels to erode.\n    rlks: int or None\n        the number of range looks. If not None, overrides the computation done by function\n        :func:`pyroSAR.ancillary.multilook_factors` based on the image pixel spacing and the target spacing.\n    azlks: int or None\n        the number of azimuth looks. 
Like `rlks`.\n    dem_oversampling_multiple: int\n        a factor to multiply the DEM oversampling factor computed by SNAP.\n        Used only for terrain flattening.\n        The SNAP default of 1 has been found to be insufficient with stripe\n        artifacts remaining in the image.\n    s1_osv_url_option: int\n        the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch`\n    \n    Returns\n    -------\n    str or None\n        Either the name of the workflow file if ``returnWF == True`` or None otherwise\n    \n    \n    .. figure:: figures/snap_geocode.svg\n        :align: center\n        \n        Function geocode workflow diagram for processing Sentinel-1 scenes.\n        Dashed lines depict optional steps. The output is sigma or gamma nought\n        backscatter with ellipsoid or radiometric terrain correction (suffix elp/rtc)\n        as well as several optional ancillary datasets (controlled via argument `export_extra`).\n\n    Examples\n    --------\n    geocode a Sentinel-1 scene and export the local incidence angle map with it\n\n    >>> from pyroSAR.snap import geocode\n    >>> filename = 'S1A_IW_GRDH_1SDV_20180829T170656_20180829T170721_023464_028DE0_F7BD.zip'\n    >>> geocode(infile=filename, outdir='outdir', spacing=20, scaling='dB',\n    >>>         export_extra=['DEM', 'localIncidenceAngle'], t_srs=4326)\n\n    See Also\n    --------\n    :class:`pyroSAR.drivers.ID`,\n    :class:`spatialist.vector.Vector`,\n    :func:`spatialist.auxil.crsConvert()`\n    \"\"\"\n    if clean_edges:\n        try:\n            import scipy\n        except ImportError:\n            raise RuntimeError('please install scipy to clean edges')\n    \n    if isinstance(infile, ID):\n        id = infile\n        ids = [id]\n    elif isinstance(infile, str):\n        id = identify(infile)\n        ids = [id]\n    elif isinstance(infile, list):\n        ids = identify_many(infile, sortkey='start')\n        id = ids[0]\n    else:\n        raise TypeError(\"'infile' 
must be of type str, list or pyroSAR.ID\")\n    \n    if id.is_processed(outdir):\n        log.info('scene {} already processed'.format(id.outname_base()))\n        return\n    \n    if not os.path.isdir(outdir):\n        os.makedirs(outdir)\n    ############################################\n    # general setup\n    process_S1_SLC = False\n    \n    if id.sensor in ['ASAR', 'ERS1', 'ERS2']:\n        formatName = 'ENVISAT'\n    elif re.search('S1[A-Z]', id.sensor):\n        if id.product == 'SLC':\n            removeS1BorderNoise = False\n            process_S1_SLC = True\n        formatName = 'SENTINEL-1'\n    else:\n        raise RuntimeError('sensor not supported (yet)')\n    \n    # several options like resampling are modified globally for the whole workflow at the end of this function\n    # this list gathers IDs of nodes for which this should not be done because they are configured individually\n    resampling_exceptions = []\n    ######################\n    if isinstance(polarizations, str):\n        if polarizations == 'all':\n            polarizations = id.polarizations\n        else:\n            if polarizations in id.polarizations:\n                polarizations = [polarizations]\n            else:\n                raise RuntimeError('polarization {} does not exists in the source product'.format(polarizations))\n    elif isinstance(polarizations, list):\n        polarizations = [x for x in polarizations if x in id.polarizations]\n    else:\n        raise RuntimeError('polarizations must be of type str or list')\n    \n    swaths = None\n    if process_S1_SLC:\n        if id.acquisition_mode == 'IW':\n            swaths = ['IW1', 'IW2', 'IW3']\n        elif id.acquisition_mode == 'EW':\n            swaths = ['EW1', 'EW2', 'EW3', 'EW4', 'EW5']\n        elif re.search('S[1-6]', id.acquisition_mode):\n            pass\n        else:\n            raise RuntimeError('acquisition mode {} not supported'.format(id.acquisition_mode))\n    \n    bandnames = 
dict()\n    bandnames['beta0'] = ['Beta0_' + x for x in polarizations]\n    bandnames['gamma0'] = ['Gamma0_' + x for x in polarizations]\n    bandnames['sigma0'] = ['Sigma0_' + x for x in polarizations]\n    bandnames['int'] = ['Intensity_' + x for x in polarizations]\n    ############################################\n    ############################################\n    # parse base workflow\n    workflow = parse_recipe('blank')\n    ############################################\n    if not isinstance(infile, list):\n        infile = [infile]\n    \n    last = None\n    bands = []\n    bands_extra = []\n    pol_ref = polarizations[0]\n    collect = []\n    for i in range(0, len(infile)):\n        ############################################\n        # Read node configuration\n        read = parse_node('Read')\n        workflow.insert_node(read)\n        read.parameters['file'] = ids[i].scene\n        read.parameters['formatName'] = formatName\n        last = read\n        ############################################\n        # Remove-GRD-Border-Noise node configuration\n        if id.sensor in ['S1A', 'S1B', 'S1C', 'S1D'] and id.product == 'GRD' and removeS1BorderNoise:\n            bn = parse_node('Remove-GRD-Border-Noise')\n            workflow.insert_node(bn, before=last.id)\n            bn.parameters['selectedPolarisations'] = polarizations\n            last = bn\n        ############################################\n        # Calibration node configuration\n        cal = parse_node('Calibration')\n        workflow.insert_node(cal, before=last.id)\n        cal.parameters['auxFile'] = 'Latest Auxiliary File'\n        # leave the selectedPolarisations field empty when processing all polarizations\n        if len(polarizations) == len(id.polarizations):\n            polarizations_cal = None\n        else:\n            polarizations_cal = polarizations\n        cal.parameters['selectedPolarisations'] = polarizations_cal\n        # choose the intensity band(s)\n     
   c1 = id.sensor == 'ASAR' and id.acquisition_mode in ['IMP', 'WSM']\n        c2 = id.sensor in ['ERS1', 'ERS2']\n        if c1 or c2:\n                cal.parameters['sourceBands'] = 'Intensity'\n        else:\n            cal.parameters['sourceBands'] = [f'Intensity_{x}' for x in polarizations]\n        \n        cal.parameters['outputBetaBand'] = False\n        cal.parameters['outputSigmaBand'] = False\n        cal.parameters['outputGammaBand'] = False\n        cal.parameters['createBetaBand'] = False\n        cal.parameters['createGammaBand'] = False\n        if isinstance(refarea, str):\n            refarea = [refarea]\n        for item in refarea:\n            if item not in ['sigma0', 'gamma0']:\n                raise ValueError('unsupported value for refarea: {}'.format(item))\n        if terrainFlattening:\n            cal.parameters['outputBetaBand'] = True\n        else:\n            for opt in refarea:\n                cal.parameters['output{}Band'.format(opt[:-1].capitalize())] = True\n        # I don't think this is needed (introduced by Ricardo Noguera some while ago)\n        # if id.sensor in ['ERS1', 'ERS2', 'ASAR']:\n        #     cal.parameters['createBetaBand'] = True\n        if len(bands) == 0:\n            bands = [x for x in ['Beta', 'Sigma', 'Gamma']\n                     if cal.parameters[f'output{x}Band'] == 'true']\n            bands = [f'{x}0_{pol}' for x in bands for pol in polarizations]\n        last = cal\n        ############################################\n        # ThermalNoiseRemoval node configuration\n        if id.sensor in ['S1A', 'S1B', 'S1C', 'S1D'] and removeS1ThermalNoise:\n            tn = parse_node('ThermalNoiseRemoval')\n            workflow.insert_node(tn, before=last.id)\n            tn.parameters['selectedPolarisations'] = polarizations\n            last = tn\n        collect.append(last.id)\n    ############################################\n    # SliceAssembly node configuration\n    if len(collect) > 1:\n     
   sliceAssembly = parse_node('SliceAssembly')\n        sliceAssembly.parameters['selectedPolarisations'] = polarizations\n        workflow.insert_node(sliceAssembly, before=collect)\n        last = sliceAssembly\n    ############################################\n    # TOPSAR-Deburst node configuration\n    if process_S1_SLC and swaths is not None:\n        deb = parse_node('TOPSAR-Deburst')\n        workflow.insert_node(deb, before=last.id)\n        deb.parameters['selectedPolarisations'] = polarizations\n        last = deb\n    ############################################\n    # Apply-Orbit-File node configuration\n    continue_on_fail = False\n    orb = orb_parametrize(scene=id, formatName=formatName, allow_RES_OSV=allow_RES_OSV,\n                          url_option=s1_osv_url_option, continueOnFail=continue_on_fail)\n    workflow.insert_node(orb, before=last.id)\n    last = orb\n    ############################################\n    # Subset node configuration\n    if shapefile is not None or offset is not None:\n        sub = sub_parametrize(scene=id, geometry=shapefile, offset=offset, buffer=0.01)\n        workflow.insert_node(sub, before=last.id)\n        last = sub\n    ############################################\n    # Multilook node configuration\n    # I don't think this is needed (introduced by Ricardo Noguera some while ago)\n    # if id.sensor in ['ERS1', 'ERS2', 'ASAR']:\n    #     bands = bandnames['beta0']\n    # else:\n    ml = mli_parametrize(scene=id, spacing=spacing, rlks=rlks,\n                         azlks=azlks, sourceBands=bands)\n    if ml is not None:\n        workflow.insert_node(ml, before=last.id)\n        last = ml\n    ############################################\n    # geocoding node(s) configuration\n    tc_options = ['incidenceAngleFromEllipsoid',\n                  'localIncidenceAngle',\n                  'projectedLocalIncidenceAngle',\n                  'DEM',\n                  'layoverShadowMask']\n    if export_extra is 
not None:\n        tc_export_extra = [x for x in export_extra if x in tc_options]\n    else:\n        tc_export_extra = None\n    tc = geo_parametrize(spacing=spacing, t_srs=t_srs,\n                         tc_method=geocoding_type, sourceBands=bands,\n                         alignToStandardGrid=alignToStandardGrid,\n                         standardGridOriginX=standardGridOriginX,\n                         standardGridOriginY=standardGridOriginY,\n                         export_extra=tc_export_extra)\n    if isinstance(tc, list):\n        sarsim, cc, warp, tc = tc\n        workflow.insert_node([sarsim, cc, warp], before=last.id)\n        last = warp\n    ############################################\n    # Terrain-Flattening node configuration\n    tf = None\n    if terrainFlattening:\n        tf = parse_node('Terrain-Flattening')\n        workflow.insert_node(tf, before=last.id)\n        sources = [x for x in bands if x.startswith('Beta')]\n        tf.parameters['sourceBands'] = sources\n        bands = [x.replace('Beta', 'Gamma') for x in bands]\n        tf.parameters['oversamplingMultiple'] = dem_oversampling_multiple\n        if 'reGridMethod' in tf.parameters.keys():\n            if externalDEMFile is None:\n                tf.parameters['reGridMethod'] = True\n            else:\n                tf.parameters['reGridMethod'] = False\n        if 'sigma0' in refarea:\n            try:\n                tf.parameters['outputSigma0'] = True\n                bands_sigma = [x.replace('Gamma', 'Sigma') for x in bands]\n                bands.extend(bands_sigma)\n            except KeyError:\n                raise RuntimeError(\"The Terrain-Flattening node does not accept \"\n                                   \"parameter 'outputSigma0'. 
Please update SNAP.\")\n        last = tf\n    \n    ############################################\n    # merge bands to pass them to Terrain-Correction\n    # - Gamma0_* bands from Terrain-Flattening\n    # - layover_shadow_mask from SAR-Simulation\n    \n    def bandmerge(basename, band_dict, workflow, subset=False):\n        bands_long = []\n        for node, bands in band_dict.items():\n            if not isinstance(bands, list):\n                raise RuntimeError(\"The values of the 'band_dict' parameter must be a list.\")\n            for band in bands:\n                comp = [band + '::']\n                if subset:\n                    comp.append('Subset_')\n                comp.append(basename)\n                comp.append('_' + workflow.suffix(stop=node))\n                bands_long.append(''.join(comp))\n        merge = parse_node('BandMerge')\n        merge.parameters['sourceBands'] = bands_long\n        return merge\n    \n    bm_tc = None\n    if terrainFlattening and len(bands_extra) > 0:\n        basename = os.path.basename(os.path.splitext(id.scene)[0])\n        band_dict = {'Terrain-Flattening': bands,\n                     tf.source: bands_extra}\n        bm_tc = bandmerge(basename, band_dict, workflow, subset=False)\n        workflow.insert_node(bm_tc, before=[last.source, last.id])\n        last = bm_tc\n    ############################################\n    # Speckle-Filter node configuration\n    speckleFilter_options = ['Boxcar',\n                             'Median',\n                             'Frost',\n                             'Gamma Map',\n                             'Refined Lee',\n                             'Lee',\n                             'Lee Sigma']\n    \n    if speckleFilter:\n        message = '{0} must be one of the following:\\n- {1}'\n        if speckleFilter not in speckleFilter_options:\n            raise ValueError(message.format('speckleFilter', '\\n- '.join(speckleFilter_options)))\n        sf = 
parse_node('Speckle-Filter')\n        workflow.insert_node(sf, before=last.id)\n        sf.parameters['sourceBands'] = bands\n        sf.parameters['filter'] = speckleFilter\n        last = sf\n    ############################################\n    # insert terrain correction node\n    tc.parameters['sourceBands'] = bands + bands_extra\n    workflow.insert_node(tc, before=last.id)\n    last = tc\n    ############################################\n    # (optionally) add node for conversion from linear to db scaling\n    if scaling not in ['dB', 'db', 'linear']:\n        raise RuntimeError('scaling must be  a string of either \"dB\", \"db\" or \"linear\"')\n    \n    if scaling in ['dB', 'db']:\n        lin2db = parse_node('LinearToFromdB')\n        workflow.insert_node(lin2db, before=last.id)\n        lin2db.parameters['sourceBands'] = bands\n        last = lin2db\n    ############################################\n    # parametrize write node\n    # create a suffix for the output file to identify processing steps performed in the workflow\n    suffix = workflow.suffix()\n    if tmpdir is None:\n        tmpdir = outdir\n    basename = os.path.join(tmpdir, id.outname_base(basename_extensions))\n    outname = basename + '_' + suffix\n    \n    write = parse_node('Write')\n    workflow.insert_node(write, before=last.id)\n    write.parameters['file'] = outname\n    write.parameters['formatName'] = 'ENVI'\n    ############################################\n    ############################################\n    if export_extra is not None:\n        for item in export_extra:\n            if item in tc_options:\n                if item == 'DEM':\n                    bands_extra.append('elevation')\n                else:\n                    bands_extra.append(item)\n            elif item == 'scatteringArea':\n                if not terrainFlattening:\n                    raise RuntimeError('scatteringArea can only be created if terrain flattening is performed')\n                
\n                base = os.path.basename(os.path.splitext(id.scene)[0])\n                subset = shapefile is not None\n                band_dict = {'Terrain-Flattening': [f'Gamma0_{pol_ref}'],\n                             tf.source: [f'Beta0_{pol_ref}']}\n                merge = bandmerge(basename=base, band_dict=band_dict,\n                                   workflow=workflow, subset=subset)\n                workflow.insert_node(merge, before=[tf.id, tf.source],\n                                     resetSuccessorSource=False)\n                \n                math = parse_node('BandMaths')\n                workflow.insert_node(math, before=merge.id, resetSuccessorSource=False)\n                \n                area = 'scatteringArea_{0}'.format(pol_ref)\n                expression = 'Beta0_{0} / Gamma0_{0}'.format(pol_ref)\n                \n                math.parameters.clear_variables()\n                exp = math.parameters['targetBands'][0]\n                exp['name'] = area\n                exp['type'] = 'float32'\n                exp['expression'] = expression\n                exp['noDataValue'] = 0.0\n                \n                # modify the bm_tc band merge node if it exists or create a new band merge node\n                if bm_tc is not None:\n                    bm_tc.source = bm_tc.source + [math.id]\n                else:\n                    bm_tc = parse_node('BandMerge')\n                    workflow.insert_node(bm_tc, before=[tf.id, math.id], resetSuccessorSource=False)\n                    tc.source = bm_tc.id\n                \n                # modify Terrain-Correction source bands\n                tc_bands = tc.parameters['sourceBands'] + ',' + area\n                tc.parameters['sourceBands'] = tc_bands\n                \n                # add scattering area to the list of bands directly written from Terrain-Correction\n                bands_extra.append(area)\n            elif item == 'gammaSigmaRatio':\n                if 
not terrainFlattening:\n                    raise RuntimeError('gammaSigmaRatio can only be created if terrain flattening is performed')\n                if sorted(refarea) != ['gamma0', 'sigma0']:\n                    raise ValueError(\"For export_extra layer 'gammaSigmaRatio' 'refarea' \"\n                                     \"must contain both sigma0 and gamma0\")\n                math = parse_node('BandMaths')\n                workflow.insert_node(math, before=tf.id, resetSuccessorSource=False)\n                \n                ratio = 'gammaSigmaRatio_{0}'.format(pol_ref)\n                expression = 'Sigma0_{0} / Gamma0_{0}'.format(pol_ref)\n                \n                math.parameters.clear_variables()\n                exp = math.parameters['targetBands'][0]\n                exp['name'] = ratio\n                exp['type'] = 'float32'\n                exp['expression'] = expression\n                exp['noDataValue'] = 0.0\n                \n                # modify the bm_tc band merge node if it exists or create a new band merge node\n                if bm_tc is not None:\n                    bm_tc.source = bm_tc.source + [math.id]\n                else:\n                    bm_tc = parse_node('BandMerge')\n                    workflow.insert_node(bm_tc, before=[tf.id, math.id], resetSuccessorSource=False)\n                    tc.source = bm_tc.id\n                \n                tc_bands = tc.parameters['sourceBands'] + ',' + ratio\n                tc.parameters['sourceBands'] = tc_bands\n                \n                # add scattering Area to the list of bands directly written from Terrain-Correction\n                bands_extra.append(ratio)\n            else:\n                raise RuntimeError(\"ID '{}' not valid for argument 'export_extra'\".format(item))\n        # directly write export_extra layers to avoid dB scaling\n        if scaling in ['db', 'dB'] and len(bands_extra) > 0:\n            tc_write = parse_node('Write')\n            
workflow.insert_node(tc_write, before=tc.id, resetSuccessorSource=False)\n            tc_write.parameters['file'] = outname\n            tc_write.parameters['formatName'] = 'ENVI'\n            tc_select = parse_node('BandSelect')\n            workflow.insert_node(tc_select, after=tc_write.id)\n            tc_select.parameters['sourceBands'] = bands_extra\n    ############################################\n    ############################################\n    # DEM handling\n    dem_parametrize(workflow=workflow, demName=demName,\n                    externalDEMFile=externalDEMFile,\n                    externalDEMNoDataValue=externalDEMNoDataValue,\n                    externalDEMApplyEGM=externalDEMApplyEGM)\n    ############################################\n    ############################################\n    # configure the resampling methods\n    \n    options_img = ['NEAREST_NEIGHBOUR',\n                   'BILINEAR_INTERPOLATION',\n                   'CUBIC_CONVOLUTION',\n                   'BISINC_5_POINT_INTERPOLATION',\n                   'BISINC_11_POINT_INTERPOLATION',\n                   'BISINC_21_POINT_INTERPOLATION',\n                   'BICUBIC_INTERPOLATION']\n    options_dem = options_img + ['DELAUNAY_INTERPOLATION']\n    \n    message = '{0} must be one of the following:\\n- {1}'\n    if demResamplingMethod not in options_dem:\n        raise ValueError(message.format('demResamplingMethod', '\\n- '.join(options_dem)))\n    if imgResamplingMethod not in options_img:\n        raise ValueError(message.format('imgResamplingMethod', '\\n- '.join(options_img)))\n    \n    workflow.set_par('demResamplingMethod', demResamplingMethod)\n    workflow.set_par('imgResamplingMethod', imgResamplingMethod,\n                     exceptions=resampling_exceptions)\n    ############################################\n    ############################################\n    # additional parameter settings applied to the whole workflow\n    \n    
workflow.set_par('nodataValueAtSea', nodataValueAtSea)\n    ############################################\n    ############################################\n    # write workflow to file and optionally execute it\n    log.debug('writing workflow to file')\n    \n    wf_name = outname.replace(tmpdir, outdir) + '_proc.xml'\n    workflow.write(wf_name)\n    \n    # execute the newly written workflow\n    if not test:\n        try:\n            groups = groupbyWorkers(wf_name, groupsize)\n            gpt(wf_name, groups=groups, cleanup=cleanup, tmpdir=outname,\n                gpt_exceptions=gpt_exceptions, gpt_args=gpt_args,\n                removeS1BorderNoiseMethod=removeS1BorderNoiseMethod)\n            writer(xmlfile=wf_name, outdir=outdir, basename_extensions=basename_extensions,\n                   clean_edges=clean_edges, clean_edges_npixels=clean_edges_npixels)\n        except:\n            tb = traceback.format_exc()\n            with open(wf_name.replace('_proc.xml', '_error.log'), 'w') as logfile:\n                logfile.write(tb)\n        finally:\n            if cleanup and os.path.isdir(outname):\n                log.info('deleting temporary files')\n                shutil.rmtree(outname, onerror=windows_fileprefix)\n        log.info('done')\n    if returnWF:\n        return wf_name\n\n\ndef noise_power(infile, outdir, polarizations, spacing, t_srs, refarea='sigma0', tmpdir=None, test=False, cleanup=True,\n                demName='SRTM 1Sec HGT', externalDEMFile=None, externalDEMNoDataValue=None, externalDEMApplyEGM=True,\n                alignToStandardGrid=False, standardGridOriginX=0, standardGridOriginY=0, groupsize=1,\n                clean_edges=False, clean_edges_npixels=1, rlks=None, azlks=None, osv_url_option=1):\n    \"\"\"\n    Generate Sentinel-1 noise power images for each polarization, calibrated to either beta, sigma or gamma nought.\n    The written GeoTIFF files will carry the suffix NEBZ, NESZ or NEGZ respectively.\n\n    Parameters\n    
----------\n    infile: str\n        The SAR scene(s) to be processed\n    outdir: str\n        The directory to write the final files to.\n    polarizations: list[str]\n        The polarizations to be processed, e.g. ['VV', 'VH'].\n    spacing: int or float\n        The target pixel spacing in meters.\n    t_srs: int or str or osgeo.osr.SpatialReference\n        A target spatial reference system in WKT, EPSG, PROJ4 or OPENGIS format.\n    refarea: str\n        either 'beta0', 'gamma0' or 'sigma0'.\n    tmpdir: str\n        Path of custom temporary directory, useful to separate output folder and temp folder. If `None`, the `outdir`\n        location will be used. The created subdirectory will be deleted after processing if ``cleanup=True``.\n    test: bool\n        If set to True the workflow xml file is only written and not executed. Default is False.\n    cleanup: bool\n        Should all files written to the temporary directory during function execution be deleted after processing?\n        Default is True.\n    demName: str\n        The name of the auto-download DEM. Default is 'SRTM 1Sec HGT'. Is ignored when `externalDEMFile` is not None.\n        Supported options:\n        \n         - ACE2_5Min\n         - ACE30\n         - ASTER 1sec GDEM\n         - CDEM\n         - Copernicus 30m Global DEM\n         - Copernicus 90m Global DEM\n         - GETASSE30\n         - SRTM 1Sec Grid\n         - SRTM 1Sec HGT\n         - SRTM 3Sec\n    externalDEMFile: str or None, optional\n        The absolute path to an external DEM file. Default is None. Overrides `demName`.\n    externalDEMNoDataValue: int, float or None, optional\n        The no data value of the external DEM. If not specified (default) the function will try to read it from the\n        specified external DEM.\n    externalDEMApplyEGM: bool, optional\n        Apply Earth Gravitational Model to external DEM? 
Default is True.\n    alignToStandardGrid: bool\n        Align all processed images to a common grid?\n    standardGridOriginX: int or float\n        The x origin value for grid alignment\n    standardGridOriginY: int or float\n        The y origin value for grid alignment\n    groupsize: int\n        The number of workers executed together in one gpt call.\n    clean_edges: bool\n        erode noisy image edges? See :func:`pyroSAR.snap.auxil.erode_edges`.\n        Does not apply to layover-shadow mask.\n    clean_edges_npixels: int\n        the number of pixels to erode.\n    rlks: int or None\n        the number of range looks. If not None, overrides the computation done by function\n        :func:`pyroSAR.ancillary.multilook_factors` based on the image pixel spacing and the target spacing.\n    azlks: int or None\n        the number of azimuth looks. Like `rlks`.\n    osv_url_option: int\n        the OSV download URL option; see :meth:`pyroSAR.S1.OSV.catch`\n    \n    Returns\n    -------\n\n    \"\"\"\n    if clean_edges:\n        try:\n            import scipy\n        except ImportError:\n            raise RuntimeError('please install scipy to clean edges')\n    \n    if refarea not in ['beta0', 'sigma0', 'gamma0']:\n        raise ValueError('refarea not supported')\n    \n    id = identify(infile)\n    \n    if id.sensor not in ['S1A', 'S1B', 'S1C', 'S1D']:\n        raise RuntimeError('this function is for Sentinel-1 only')\n    \n    os.makedirs(outdir, exist_ok=True)\n    if tmpdir is not None:\n        os.makedirs(tmpdir, exist_ok=True)\n    \n    wf = parse_recipe('blank')\n    \n    read = parse_node('Read')\n    read.parameters['file'] = infile\n    wf.insert_node(read)\n    ############################################\n    orb = orb_parametrize(scene=id, workflow=wf, before=read.id,\n                          formatName='SENTINEL-1', allow_RES_OSV=True,\n                          url_option=osv_url_option)\n    
############################################\n    cal = parse_node('Calibration')\n    wf.insert_node(cal, before=orb.id)\n    cal.parameters['selectedPolarisations'] = polarizations\n    cal.parameters['outputBetaBand'] = False\n    cal.parameters['outputSigmaBand'] = False\n    cal.parameters['outputGammaBand'] = False\n    \n    inband = refarea.capitalize()\n    cal.parameters['output{}Band'.format(inband[:-1])] = True\n    \n    tnr = parse_node('ThermalNoiseRemoval')\n    wf.insert_node(tnr, before=cal.id)\n    if 'outputNoise' in tnr.parameters.keys():\n        tnr.parameters['outputNoise'] = True\n    last = tnr\n    ############################################\n    if id.product == 'SLC' and id.acquisition_mode in ['EW', 'IW']:\n        deb = parse_node('TOPSAR-Deburst')\n        wf.insert_node(deb, before=tnr.id)\n        last = deb\n    ############################################\n    select = parse_node('BandSelect')\n    wf.insert_node(select, before=last.id)\n    measure = 'NE{}Z'.format(refarea.capitalize()[0])\n    bands = ['{}_{}'.format(measure, pol) for pol in polarizations]\n    select.parameters['sourceBands'] = bands\n    last = select\n    ############################################\n    # Multilook node configuration\n    ml = mli_parametrize(scene=id, spacing=spacing, rlks=rlks, azlks=azlks)\n    if ml is not None:\n        wf.insert_node(ml, before=last.id)\n        last = ml\n    ############################################\n    tc = geo_parametrize(spacing=spacing, t_srs=t_srs, demName=demName,\n                         externalDEMFile=externalDEMFile,\n                         externalDEMNoDataValue=externalDEMNoDataValue,\n                         externalDEMApplyEGM=externalDEMApplyEGM,\n                         alignToStandardGrid=alignToStandardGrid,\n                         standardGridOriginX=standardGridOriginX,\n                         standardGridOriginY=standardGridOriginY)\n    wf.insert_node(tc, before=last.id)\n    last 
= tc\n    ############################################\n    \n    suffix = wf.suffix()\n    if tmpdir is None:\n        tmpdir = outdir\n    basename = id.outname_base() + '_' + suffix\n    procdir = os.path.join(tmpdir, basename)\n    outname = os.path.join(procdir, basename + '.dim')\n    \n    write = parse_node('Write')\n    wf.insert_node(write, before=last.id)\n    write.parameters['file'] = outname\n    write.parameters['formatName'] = 'BEAM-DIMAP'\n    \n    wf_name = os.path.join(outdir, basename + '_proc.xml')\n    wf.write(wf_name)\n    \n    if not test:\n        groups = groupbyWorkers(wf_name, groupsize)\n        gpt(xmlfile=wf_name, tmpdir=procdir, groups=groups, cleanup=cleanup)\n        writer(xmlfile=wf_name, outdir=outdir, clean_edges=clean_edges,\n               clean_edges_npixels=clean_edges_npixels)\n        if cleanup:\n            if os.path.isdir(procdir):\n                shutil.rmtree(procdir, onerror=windows_fileprefix)\n"
  },
  {
    "path": "pyroSAR/xml_util.py",
    "content": "###############################################################################\n# utility collection for xml file handling\n\n# Copyright (c) 2016-2018, the pyroSAR Developers.\n\n# This file is part of the pyroSAR Project. It is subject to the\n# license terms in the LICENSE.txt file found in the top-level\n# directory of this distribution and at\n# https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.\n# No part of the pyroSAR project, including this file, may be\n# copied, modified, propagated, or distributed except according\n# to the terms contained in the LICENSE.txt file.\n###############################################################################\n\nimport os\nimport re\nimport ast\nimport xml.etree.ElementTree as ET\n\n\nclass XMLHandler(object):\n    def __init__(self, xml):\n        errormessage = 'xmlfile must be a string pointing to an existing file, ' \\\n                       'a string or bytes object from which an xml can be parsed or a file object'\n        if 'readline' in dir(xml):\n            self.infile = xml.name if hasattr(xml, 'name') else None\n            xml.seek(0)\n            self.text = xml.read()\n            xml.seek(0)\n        elif isinstance(xml, (bytes, str)):\n            try:\n                isfile = os.path.isfile(xml)\n            except ValueError:\n                isfile = False\n            if isfile:\n                self.infile = xml\n                with open(xml, 'r') as infile:\n                    self.text = infile.read()\n            else:\n                try:\n                    tree = ET.fromstring(xml)\n                    self.infile = None\n                    self.text = str(xml)\n                    del tree\n                except ET.ParseError:\n                    raise RuntimeError(errormessage)\n        else:\n            raise RuntimeError(errormessage)\n        defs = re.findall('xmlns:[a-z0-9]+=\"[^\"]*\"', self.text)\n        dictstring = 
'{{{}}}'.format(re.sub(r'xmlns:([a-z0-9]*)=', r'\"\\1\":', ', '.join(defs)))\n        self.namespaces = ast.literal_eval(dictstring)\n\n    def restoreNamespaces(self):\n        for key, val in self.namespaces.items():\n            val_new = val.split('/')[-1]\n            self.text = self.text.replace(key, val_new)\n\n    def write(self, outname, mode):\n        with open(outname, mode) as out:\n            out.write(self.text)\n\n    def __enter__(self):\n        return self\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        return\n\n\ndef getNamespaces(xmlfile):\n    with XMLHandler(xmlfile) as xml:\n        return xml.namespaces\n"
  },
  {
    "path": "readthedocs.yml",
    "content": "version: 2\n\nbuild:\n  os: \"ubuntu-lts-latest\"\n  tools:\n    python: \"mambaforge-latest\"\n\nconda:\n  environment: environment.yml\n\npython:\n  install:\n    - method: pip\n      path: .\n      extra_requirements:\n        - docs\n\nformats:\n  - epub\n  - pdf\n\nsphinx:\n  builder: html\n  configuration: docs/source/conf.py\n"
  },
  {
    "path": "requirements-dev.txt",
    "content": "-r requirements.txt\n\n#Testing requirements\npytest\n\n#Documentation requirements\nsphinx\nsphinx_rtd_theme\nsphinxcontrib-bibtex\nsphinxcontrib-svg2pdfconverter\ncairosvg\nsphinx-autodoc-typehints"
  },
  {
    "path": "requirements.txt",
    "content": "geoalchemy2<0.14.0\nlxml\nnumpy\npackaging\npillow\nprogressbar2\npsycopg2\npyyaml\nrequests\nshapely\nspatialist>=0.17.0\nsqlalchemy-utils>=0.37,<0.42\nsqlalchemy>=1.4,<2.0\n"
  },
  {
    "path": "tests/conftest.py",
    "content": "import os\nimport shutil\nimport pytest\nimport platform\nfrom pathlib import Path\nfrom pyroSAR.examine import ExamineSnap\n\n\n@pytest.fixture\ndef travis():\n    return 'TRAVIS' in os.environ.keys()\n\n\n@pytest.fixture\ndef appveyor():\n    return 'APPVEYOR' in os.environ.keys()\n\n\n@pytest.fixture\ndef testdir():\n    return os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')\n\n\n@pytest.fixture\ndef testdata(testdir):\n    out = {\n        # ASAR_IMS__A_20040703T205338, product: SLC, driver: ESA\n        'asar': os.path.join(testdir,\n                             'ASA_IMS_1PNESA20040703_205338_000000182028_00172_12250_00001672562030318361237.N1'),\n        # ERS1_IMP__A_19960808T205906, product: PRI, driver: ESA\n        'ers1_esa': os.path.join(testdir, 'SAR_IMP_1PXESA19960808_205906_00000017G158_00458_26498_2615.E1'),\n        # ERS1_IMS__D_19951220T024320, product: SLC, driver: CEOS_ERS\n        'ers1_ceos': os.path.join(testdir, 'SAR_IMS_1PXESA19951220_024320_00000015G152_00132_23166_0252.E1.zip'),\n        # PSR2_FBD__A_20140909T043342, product: 1.5, driver: CEOS_PSR\n        'psr2': os.path.join(testdir, '0000022708_001001_ALOS2015976960-140909.zip'),\n        # main scene for testing Sentinel-1 metadata reading and database ingestion\n        's1': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'),\n        # for test_snap.test_slice_assembly\n        's1_2': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150222T170725_20150222T170750_004739_005DD8_CEAB.zip'),\n        # for testing database duplicate handling\n        's1_3': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_6D00.zip'),\n        # for testing database duplicate handling\n        's1_4': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_FEC3.zip'),\n        # used in test_osv\n        's1_orbit': os.path.join(testdir, 
'S1A_IW_GRDH_1SDV_20210119T031653_20210119T031718_036201_043ED0_8255.zip'),\n        'tif': os.path.join(testdir, 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'),\n        'archive_old_csv': os.path.join(testdir, 'archive_outdated.csv'),\n        'archive_old_bbox': os.path.join(testdir, 'archive_outdated_bbox.db'),\n        'dempar': os.path.join(testdir, 'dem.par'),\n        'mlipar': os.path.join(testdir, 'mli.par')\n    }\n    return out\n\n\n@pytest.fixture\ndef auxdata_dem_cases():\n    cases = [\n        ('AW3D30', ['N050E010/N051E011.tar.gz']),\n        ('SRTM 1Sec HGT', ['https://step.esa.int/auxdata/dem/SRTMGL1/N51E011.SRTMGL1.hgt.zip']),\n        ('SRTM 3Sec', ['https://step.esa.int/auxdata/dem/SRTM90/tiff/srtm_39_02.zip']),\n        ('Copernicus 30m Global DEM', ['https://copernicus-dem-30m.s3.eu-central-1.amazonaws.com/'\n                                       'Copernicus_DSM_COG_10_N51_00_E011_00_DEM/'\n                                       'Copernicus_DSM_COG_10_N51_00_E011_00_DEM.tif'])\n        # ('TDX90m', ['DEM/N51/E010/TDM1_DEM__30_N51E011.zip'])\n    ]\n    return cases\n\n\n@pytest.fixture(scope='session', autouse=True)\ndef tmp_home(tmp_path_factory):\n    home = tmp_path_factory.mktemp('home')\n    snap = home / '.snap'\n    \n    if platform.system() == 'Windows':\n        roaming_snap = Path(os.environ['APPDATA']) / 'SNAP'\n        var_home = 'USERPROFILE'\n        roaming = home / 'AppData' / 'Roaming'\n        local = home / 'AppData' / 'Local'\n        roaming.mkdir(parents=True, exist_ok=True)\n        if roaming_snap.exists():\n            shutil.copytree(roaming_snap, roaming / 'SNAP')\n        local.mkdir(parents=True, exist_ok=True)\n        os.environ['APPDATA'] = str(roaming)\n        os.environ['LOCALAPPDATA'] = str(local)\n        os.environ['HOME'] = str(home)\n    else:\n        var_home = 'HOME'\n    os.environ[var_home] = str(home)\n    \n    assert os.path.expanduser('~') == str(home)\n    \n    snap_config = 
ExamineSnap()\n    snap_config.userpath = str(snap)\n    snap_config.auxdatapath = str(snap / 'auxdata')\n    \n    return home\n"
  },
  {
    "path": "tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif.aux.xml",
    "content": "<PAMDataset>\n  <PAMRasterBand band=\"1\">\n    <Metadata>\n      <MDI key=\"STATISTICS_MAXIMUM\">1.4325850009918</MDI>\n      <MDI key=\"STATISTICS_MEAN\">-12.12492953445</MDI>\n      <MDI key=\"STATISTICS_MINIMUM\">-26.654710769653</MDI>\n      <MDI key=\"STATISTICS_STDDEV\">4.7382735947383</MDI>\n    </Metadata>\n  </PAMRasterBand>\n</PAMDataset>\n"
  },
  {
    "path": "tests/data/archive_outdated.csv",
    "content": "sensor;acquisition_mode;polarizations;scene;bbox\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150320T163021_20150320T163050_005117_006709_0AF7.zip;POLYGON ((29.722511 -25.761124,29.722511 -23.456081,32.624969 -23.456081,32.624969 -25.761124,29.722511 -25.761124))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150320T163050_20150320T163115_005117_006709_0488.zip;POLYGON ((29.326937 -24.026264,29.326937 -21.962896,32.128216 -21.962896,32.128216 -24.026264,29.326937 -24.026264))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150401T163018_20150401T163043_005292_006B1C_1686.zip;POLYGON ((29.840227 -25.969412,29.840227 -23.886841,32.738415 -23.886841,32.738415 -25.969412,29.840227 -25.969412))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150401T163043_20150401T163108_005292_006B1C_1E81.zip;POLYGON ((29.446041 -24.470673,29.446041 -22.393301,32.299873 -22.393301,32.299873 -24.470673,29.446041 -24.470673))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150401T163108_20150401T163135_005292_006B1C_1DB7.zip;POLYGON ((29.021605 -22.96912,29.021605 -20.753139,31.878654 -20.753139,31.878654 -22.96912,29.021605 -22.96912))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150413T163015_20150413T163040_005467_006FA1_EAFF.zip;POLYGON ((29.896677 -26.166262,29.896677 -24.087826,32.771732 -24.087826,32.771732 -26.166262,29.896677 -26.166262))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150413T163040_20150413T163105_005467_006FA1_5C0F.zip;POLYGON ((29.499765 -24.6668,29.499765 -22.594812,32.335613 -22.594812,32.335613 -24.6668,29.499765 -24.6668))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150413T163105_20150413T163130_005467_006FA1_B14F.zip;POLYGON ((29.105707 -23.16585,29.105707 
-21.089365,31.911707 -21.089365,31.911707 -23.16585,29.105707 -23.16585))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150425T163019_20150425T163044_005642_0073B3_2119.zip;POLYGON ((29.795837 -25.979542,29.795837 -23.883062,32.753532 -23.883062,32.753532 -25.979542,29.795837 -25.979542))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150425T163044_20150425T163109_005642_0073B3_3A06.zip;POLYGON ((29.402328 -24.480698,29.402328 -22.38957,32.314682 -22.38957,32.314682 -24.480698,29.402328 -24.480698))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150425T163109_20150425T163134_005642_0073B3_6E46.zip;POLYGON ((29.016043 -22.979046,29.016043 -20.894762,31.893469 -20.894762,31.893469 -22.979046,29.016043 -22.979046))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150507T163016_20150507T163041_005817_0077B6_2C74.zip;POLYGON ((29.896233 -26.16622,29.896233 -24.087814,32.771179 -24.087814,32.771179 -26.16622,29.896233 -26.16622))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150507T163041_20150507T163106_005817_0077B6_8691.zip;POLYGON ((29.49951 -24.666746,29.49951 -22.594776,32.334984 -22.594776,32.334984 -24.666746,29.49951 -24.666746))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150507T163106_20150507T163131_005817_0077B6_8C98.zip;POLYGON ((29.105513 -23.165745,29.105513 -21.090246,31.911242 -21.090246,31.911242 -23.165745,29.105513 -23.165745))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150519T163017_20150519T163042_005992_007B8F_3851.zip;POLYGON ((29.897038 -26.166157,29.897038 -24.087788,32.771793 -24.087788,32.771793 -26.166157,29.897038 -26.166157))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150519T163042_20150519T163107_005992_007B8F_47C6.zip;POLYGON ((29.500132 
-24.666685,29.500132 -22.594763,32.335686 -22.594763,32.335686 -24.666685,29.500132 -24.666685))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150519T163107_20150519T163132_005992_007B8F_0579.zip;POLYGON ((29.106106 -23.165724,29.106106 -21.089394,31.911791 -21.089394,31.911791 -23.165724,29.106106 -23.165724))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150531T163019_20150531T163044_006167_008064_174F.zip;POLYGON ((29.877449 -26.103035,29.877449 -24.011805,32.807957 -24.011805,32.807957 -26.103035,29.877449 -26.103035))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150531T163044_20150531T163109_006167_008064_8196.zip;POLYGON ((29.483475 -24.603872,29.483475 -22.518173,32.369869 -22.518173,32.369869 -24.603872,29.483475 -24.603872))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150531T163109_20150531T163138_006167_008064_43E7.zip;POLYGON ((29.020607 -23.102253,29.020607 -20.745604,31.948956 -20.745604,31.948956 -23.102253,29.020607 -23.102253))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150612T163019_20150612T163044_006342_008583_2C8F.zip;POLYGON ((29.878511 -26.102922,29.878511 -24.011591,32.808624 -24.011591,32.808624 -26.102922,29.878511 -26.102922))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150612T163044_20150612T163109_006342_008583_825D.zip;POLYGON ((29.484146 -24.60367,29.484146 -22.518063,32.370762 -22.518063,32.370762 -24.60367,29.484146 -24.60367))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150612T163109_20150612T163139_006342_008583_1A63.zip;POLYGON ((29.021082 -23.102142,29.021082 -20.744972,31.94952 -20.744972,31.94952 -23.102142,29.021082 -23.102142))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150624T163109_20150624T163134_006517_008A81_AFA4.zip;POLYGON 
((29.104725 -23.165815,29.104725 -21.081741,31.941303 -21.081741,31.941303 -23.165815,29.104725 -23.165815))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150706T163019_20150706T163044_006692_008F31_2A55.zip;POLYGON ((29.896751 -26.166161,29.896751 -24.087711,32.771801 -24.087711,32.771801 -26.166161,29.896751 -26.166161))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150706T163044_20150706T163109_006692_008F31_1B51.zip;POLYGON ((29.499834 -24.666697,29.499834 -22.594696,32.335682 -22.594696,32.335682 -24.666697,29.499834 -24.666697))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150706T163109_20150706T163134_006692_008F31_7269.zip;POLYGON ((29.105928 -23.165747,29.105928 -21.089869,31.911772 -21.089869,31.911772 -23.165747,29.105928 -23.165747))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150718T163020_20150718T163045_006867_00943D_0F15.zip;POLYGON ((29.8962 -26.166264,29.8962 -24.08787,32.771133 -24.08787,32.771133 -26.166264,29.8962 -26.166264))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150718T163045_20150718T163110_006867_00943D_B109.zip;POLYGON ((29.499496 -24.666796,29.499496 -22.59481,32.335049 -22.59481,32.335049 -24.666796,29.499496 -24.666796))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150718T163110_20150718T163135_006867_00943D_6A70.zip;POLYGON ((29.105318 -23.165798,29.105318 -21.089481,31.911324 -21.089481,31.911324 -23.165798,29.105318 -23.165798))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150730T163020_20150730T163045_007042_00992D_DE63.zip;POLYGON ((29.895981 -26.166233,29.895981 -24.087852,32.770905 -24.087852,32.770905 -26.166233,29.895981 
-26.166233))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150730T163045_20150730T163110_007042_00992D_A821.zip;POLYGON ((29.499287 -24.666761,29.499287 -22.594816,32.334736 -22.594816,32.334736 -24.666761,29.499287 -24.666761))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150730T163110_20150730T163135_007042_00992D_92D1.zip;POLYGON ((29.104885 -23.165762,29.104885 -21.088505,31.911026 -21.088505,31.911026 -23.165762,29.104885 -23.165762))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150823T163022_20150823T163047_007392_00A2B0_8BE3.zip;POLYGON ((29.895638 -26.166166,29.895638 -24.087719,32.770679 -24.087719,32.770679 -26.166166,29.895638 -26.166166))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150823T163047_20150823T163112_007392_00A2B0_BF1D.zip;POLYGON ((29.498909 -24.6667,29.498909 -22.594666,32.334568 -22.594666,32.334568 -24.6667,29.498909 -24.6667))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150823T163112_20150823T163137_007392_00A2B0_D3B7.zip;POLYGON ((29.104836 -23.165707,29.104836 -21.089872,31.91082 -21.089872,31.91082 -23.165707,29.104836 -23.165707))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150916T163022_20150916T163047_007742_00AC27_DF3B.zip;POLYGON ((29.896545 -26.16621,29.896545 -24.087751,32.771675 -24.087751,32.771675 -26.16621,29.896545 -26.16621))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150916T163047_20150916T163112_007742_00AC27_9456.zip;POLYGON ((29.499653 -24.66674,29.499653 -22.594753,32.33548 -22.594753,32.33548 -24.66674,29.499653 -24.66674))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150916T163112_20150916T163137_007742_00AC27_20DD.zip;POLYGON ((29.10564 -23.165785,29.10564 -21.089365,31.91169 -21.089365,31.91169 -23.165785,29.10564 
-23.165785))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150928T163023_20150928T163048_007917_00B0E7_CDB8.zip;POLYGON ((29.896198 -26.166267,29.896198 -24.087862,32.771217 -24.087862,32.771217 -26.166267,29.896198 -26.166267))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150928T163048_20150928T163113_007917_00B0E7_924D.zip;POLYGON ((29.499504 -24.666798,29.499504 -22.594824,32.335049 -22.594824,32.335049 -24.666798,29.499504 -24.666798))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20150928T163113_20150928T163138_007917_00B0E7_C965.zip;POLYGON ((29.105209 -23.165796,29.105209 -21.088936,31.91143 -21.088936,31.91143 -23.165796,29.105209 -23.165796))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151103T163023_20151103T163048_008442_00BEEF_6501.zip;POLYGON ((29.890478 -26.144796,29.890478 -24.062801,32.782341 -24.062801,32.782341 -26.144796,29.890478 -26.144796))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151103T163048_20151103T163113_008442_00BEEF_FA06.zip;POLYGON ((29.495895 -24.645571,29.495895 -22.569088,32.344746 -22.569088,32.344746 -24.645571,29.495895 -24.645571))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151103T163113_20151103T163138_008442_00BEEF_DB2D.zip;POLYGON ((29.106726 -23.144005,29.106726 -21.085703,31.923521 -21.085703,31.923521 -23.144005,29.106726 -23.144005))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151115T163017_20151115T163042_008617_00C3C4_0F29.zip;POLYGON ((29.983751 -26.475651,29.983751 -24.389345,32.886436 -24.389345,32.886436 -26.475651,29.983751 -26.475651))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151115T163042_20151115T163107_008617_00C3C4_6D51.zip;POLYGON ((29.57774 -24.97558,29.57774 -22.898914,32.449005 -22.898914,32.449005 
-24.97558,29.57774 -24.97558))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151115T163107_20151115T163132_008617_00C3C4_E3E9.zip;POLYGON ((29.188725 -23.47658,29.188725 -21.414307,32.015793 -21.414307,32.015793 -23.47658,29.188725 -23.47658))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151127T163017_20151127T163042_008792_00C8A4_4629.zip;POLYGON ((29.984802 -26.468687,29.984802 -24.381861,32.884647 -24.381861,32.884647 -26.468687,29.984802 -26.468687))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151127T163042_20151127T163107_008792_00C8A4_A5C6.zip;POLYGON ((29.576237 -24.96805,29.576237 -22.891972,32.449924 -22.891972,32.449924 -24.96805,29.576237 -24.96805))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151127T163107_20151127T163132_008792_00C8A4_C1C0.zip;POLYGON ((29.188255 -23.469553,29.188255 -21.414734,32.014118 -21.414734,32.014118 -23.469553,29.188255 -23.469553))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151209T163017_20151209T163042_008967_00CDA5_BFFA.zip;POLYGON ((29.984684 -26.468592,29.984684 -24.381884,32.884411 -24.381884,32.884411 -26.468592,29.984684 -26.468592))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151209T163042_20151209T163107_008967_00CDA5_7DFF.zip;POLYGON ((29.576115 -24.968048,29.576115 -22.891996,32.449711 -22.891996,32.449711 -24.968048,29.576115 -24.968048))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151209T163107_20151209T163132_008967_00CDA5_D754.zip;POLYGON ((29.188021 -23.469553,29.188021 -21.414318,32.013905 -21.414318,32.013905 -23.469553,29.188021 -23.469553))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151221T163016_20151221T163041_009142_00D273_EBA6.zip;POLYGON ((29.98576 -26.468563,29.98576 -24.381729,32.885532 
-24.381729,32.885532 -26.468563,29.98576 -26.468563))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151221T163041_20151221T163106_009142_00D273_059B.zip;POLYGON ((29.577154 -24.967936,29.577154 -22.891848,32.450771 -22.891848,32.450771 -24.967936,29.577154 -24.967936))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20151221T163106_20151221T163131_009142_00D273_16AF.zip;POLYGON ((29.188936 -23.469444,29.188936 -21.41382,32.014931 -21.41382,32.014931 -23.469444,29.188936 -23.469444))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160102T163016_20160102T163041_009317_00D771_5CE1.zip;POLYGON ((29.985262 -26.468515,29.985262 -24.381636,32.885139 -24.381636,32.885139 -26.468515,29.985262 -26.468515))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160102T163041_20160102T163106_009317_00D771_7CB0.zip;POLYGON ((29.576639 -24.96789,29.576639 -22.89176,32.450359 -22.89176,32.450359 -24.96789,29.576639 -24.96789))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160102T163106_20160102T163131_009317_00D771_8E63.zip;POLYGON ((29.18856 -23.469402,29.18856 -21.414356,32.0145 -21.414356,32.0145 -23.469402,29.18856 -23.469402))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160114T163015_20160114T163040_009492_00DC6F_3F1D.zip;POLYGON ((29.985516 -26.46862,29.985516 -24.381723,32.885483 -24.381723,32.885483 -26.46862,29.985516 -26.46862))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160114T163040_20160114T163105_009492_00DC6F_8BE8.zip;POLYGON ((29.576925 -24.967991,29.576925 -22.891933,32.450714 -22.891933,32.450714 -24.967991,29.576925 -24.967991))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160114T163105_20160114T163130_009492_00DC6F_7B4A.zip;POLYGON ((29.188921 -23.469591,29.188921 -21.414793,32.014889 
-21.414793,32.014889 -23.469591,29.188921 -23.469591))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160126T163015_20160126T163040_009667_00E191_BF31.zip;POLYGON ((29.98587 -26.468697,29.98587 -24.381821,32.885822 -24.381821,32.885822 -26.468697,29.98587 -26.468697))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160126T163040_20160126T163105_009667_00E191_F497.zip;POLYGON ((29.577301 -24.968063,29.577301 -22.892027,32.451077 -22.892027,32.451077 -24.968063,29.577301 -24.968063))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160126T163105_20160126T163130_009667_00E191_4586.zip;POLYGON ((29.189318 -23.469662,29.189318 -21.414885,32.01527 -21.414885,32.01527 -23.469662,29.189318 -23.469662))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160207T163015_20160207T163040_009842_00E698_B86F.zip;POLYGON ((29.984554 -26.468782,29.984554 -24.381939,32.884491 -24.381939,32.884491 -26.468782,29.984554 -26.468782))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160207T163040_20160207T163105_009842_00E698_702B.zip;POLYGON ((29.575989 -24.968145,29.575989 -22.892075,32.44968 -22.892075,32.44968 -24.968145,29.575989 -24.968145))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160207T163105_20160207T163130_009842_00E698_43FF.zip;POLYGON ((29.187878 -23.469648,29.187878 -21.414307,32.013878 -21.414307,32.013878 -23.469648,29.187878 -23.469648))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160302T163015_20160302T163040_010192_00F0B4_2CE6.zip;POLYGON ((29.984974 -26.468695,29.984974 -24.381824,32.885101 -24.381824,32.885101 -26.468695,29.984974 -26.468695))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160302T163040_20160302T163105_010192_00F0B4_7E6C.zip;POLYGON ((29.576368 -24.968151,29.576368 
-22.891966,32.450268 -22.891966,32.450268 -24.968151,29.576368 -24.968151))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160302T163105_20160302T163130_010192_00F0B4_9118.zip;POLYGON ((29.188372 -23.46966,29.188372 -21.414822,32.014423 -21.414822,32.014423 -23.46966,29.188372 -23.46966))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160314T163015_20160314T163040_010367_00F5D2_3180.zip;POLYGON ((29.985039 -26.468819,29.985039 -24.381834,32.885281 -24.381834,32.885281 -26.468819,29.985039 -26.468819))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160314T163040_20160314T163105_010367_00F5D2_6A98.zip;POLYGON ((29.576464 -24.968185,29.576464 -22.892038,32.450523 -22.892038,32.450523 -24.968185,29.576464 -24.968185))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160314T163105_20160314T163130_010367_00F5D2_D424.zip;POLYGON ((29.188429 -23.46978,29.188429 -21.414715,32.014709 -21.414715,32.014709 -23.46978,29.188429 -23.46978))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160326T163015_20160326T163040_010542_00FAAA_07B7.zip;POLYGON ((29.983774 -26.469028,29.983774 -24.382057,32.884087 -24.382057,32.884087 -26.469028,29.983774 -26.469028))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160326T163040_20160326T163105_010542_00FAAA_CDEB.zip;POLYGON ((29.575235 -24.968386,29.575235 -22.892277,32.449276 -22.892277,32.449276 -24.968386,29.575235 -24.968386))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160326T163105_20160326T163130_010542_00FAAA_E9CC.zip;POLYGON ((29.187195 -23.469973,29.187195 -21.41477,32.013496 -21.41477,32.013496 -23.469973,29.187195 -23.469973))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160407T163016_20160407T163041_010717_00FFCD_1263.zip;POLYGON ((29.983885 
-26.469036,29.983885 -24.382095,32.884079 -24.382095,32.884079 -26.469036,29.983885 -26.469036))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160407T163041_20160407T163106_010717_00FFCD_B623.zip;POLYGON ((29.575363 -24.96839,29.575363 -22.892286,32.44939 -22.892286,32.44939 -24.96839,29.575363 -24.96839))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160407T163106_20160407T163130_010717_00FFCD_2DFF.zip;POLYGON ((29.187382 -23.469973,29.187382 -21.414953,32.013626 -21.414953,32.013626 -23.469973,29.187382 -23.469973))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160419T163016_20160419T163041_010892_010512_BE37.zip;POLYGON ((29.984301 -26.469084,29.984301 -24.382145,32.884499 -24.382145,32.884499 -26.469084,29.984301 -26.469084))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160419T163041_20160419T163106_010892_010512_93C4.zip;POLYGON ((29.575779 -24.968437,29.575779 -22.892338,32.44981 -22.892338,32.44981 -24.968437,29.575779 -24.968437))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160419T163106_20160419T163131_010892_010512_9485.zip;POLYGON ((29.187885 -23.47002,29.187885 -21.415358,32.014046 -21.415358,32.014046 -23.47002,29.187885 -23.47002))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160501T163017_20160501T163042_011067_010A9A_CFF2.zip;POLYGON ((29.939482 -26.479372,29.939482 -24.378469,32.898998 -24.378469,32.898998 -26.479372,29.939482 -26.479372))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160501T163042_20160501T163107_011067_010A9A_0071.zip;POLYGON ((29.531408 -24.978584,29.531408 -22.888714,32.464397 -22.888714,32.464397 -24.978584,29.531408 -24.978584))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160501T163107_20160501T163132_011067_010A9A_80BA.zip;POLYGON 
((29.139896 -23.480072,29.139896 -21.395042,32.028358 -21.395042,32.028358 -23.480072,29.139896 -23.480072))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160513T163018_20160513T163043_011242_011021_1123.zip;POLYGON ((29.984646 -26.468893,29.984646 -24.381907,32.884865 -24.381907,32.884865 -26.468893,29.984646 -26.468893))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160513T163043_20160513T163108_011242_011021_F556.zip;POLYGON ((29.576084 -24.968252,29.576084 -22.892107,32.450134 -22.892107,32.450134 -24.968252,29.576084 -24.968252))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160513T163108_20160513T163132_011242_011021_1B7C.zip;POLYGON ((29.188042 -23.469843,29.188042 -21.41469,32.014332 -21.41469,32.014332 -23.469843,29.188042 -23.469843))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160525T163018_20160525T163043_011417_0115DE_7177.zip;POLYGON ((29.984583 -26.468987,29.984583 -24.382019,32.884888 -24.382019,32.884888 -26.468987,29.984583 -26.468987))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160525T163043_20160525T163108_011417_0115DE_F7BC.zip;POLYGON ((29.576052 -24.968342,29.576052 -22.892237,32.450085 -22.892237,32.450085 -24.968342,29.576052 -24.968342))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160525T163108_20160525T163133_011417_0115DE_4C1F.zip;POLYGON ((29.188263 -23.469929,29.188263 -21.415703,32.014313 -21.415703,32.014313 -23.469929,29.188263 -23.469929))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160606T163019_20160606T163044_011592_011B6C_7F00.zip;POLYGON ((29.984045 -26.469116,29.984045 -24.382162,32.884338 -24.382162,32.884338 -26.469116,29.984045 
-26.469116))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160606T163044_20160606T163109_011592_011B6C_F49A.zip;POLYGON ((29.575529 -24.96847,29.575529 -22.892378,32.449554 -22.892378,32.449554 -24.96847,29.575529 -24.96847))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160606T163109_20160606T163134_011592_011B6C_ABD0.zip;POLYGON ((29.187597 -23.470053,29.187597 -21.41522,32.013798 -21.41522,32.013798 -23.470053,29.187597 -23.470053))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160630T163020_20160630T163045_011942_012679_B0AD.zip;POLYGON ((29.98428 -26.468801,29.98428 -24.381937,32.88446 -24.381937,32.88446 -26.468801,29.98428 -26.468801))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160630T163045_20160630T163110_011942_012679_381D.zip;POLYGON ((29.575726 -24.968246,29.575726 -22.892063,32.44968 -22.892063,32.44968 -24.968246,29.575726 -24.968246))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160630T163110_20160630T163135_011942_012679_5162.zip;POLYGON ((29.187624 -23.469742,29.187624 -21.414288,32.013885 -21.414288,32.013885 -23.469742,29.187624 -23.469742))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160712T163021_20160712T163046_012117_012C3D_2857.zip;POLYGON ((29.985006 -26.468851,29.985006 -24.381851,32.885334 -24.381851,32.885334 -26.468851,29.985006 -26.468851))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160712T163046_20160712T163111_012117_012C3D_D664.zip;POLYGON ((29.57645 -24.968212,29.57645 -22.892073,32.450497 -22.892073,32.450497 -24.968212,29.57645 -24.968212))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160712T163111_20160712T163135_012117_012C3D_A57F.zip;POLYGON ((29.188391 -23.469801,29.188391 -21.414566,32.014698 -21.414566,32.014698 
-23.469801,29.188391 -23.469801))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160724T163021_20160724T163046_012292_0131E3_4006.zip;POLYGON ((29.984926 -26.468748,29.984926 -24.381905,32.885036 -24.381905,32.885036 -26.468748,29.984926 -26.468748))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160724T163046_20160724T163111_012292_0131E3_A55B.zip;POLYGON ((29.576363 -24.968201,29.576363 -22.892012,32.450325 -22.892012,32.450325 -24.968201,29.576363 -24.968201))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160724T163111_20160724T163136_012292_0131E3_0B62.zip;POLYGON ((29.188162 -23.469698,29.188162 -21.413883,32.014519 -21.413883,32.014519 -23.469698,29.188162 -23.469698))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160805T163022_20160805T163047_012467_0137BD_4BDC.zip;POLYGON ((29.98365 -26.468964,29.98365 -24.382038,32.883865 -24.382038,32.883865 -26.468964,29.98365 -26.468964))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160805T163047_20160805T163112_012467_0137BD_E897.zip;POLYGON ((29.575132 -24.968321,29.575132 -22.892229,32.449158 -22.892229,32.449158 -24.968321,29.575132 -24.968321))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160805T163112_20160805T163137_012467_0137BD_59E5.zip;POLYGON ((29.186975 -23.469906,29.186975 -21.414186,32.013397 -21.414186,32.013397 -23.469906,29.186975 -23.469906))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160817T163023_20160817T163048_012642_013D79_D3A5.zip;POLYGON ((29.984371 -26.468781,29.984371 -24.381962,32.884472 -24.381962,32.884472 -26.468781,29.984371 -26.468781))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160817T163048_20160817T163113_012642_013D79_20F1.zip;POLYGON ((29.575832 -24.968229,29.575832 -22.892065,32.44978 
-22.892065,32.44978 -24.968229,29.575832 -24.968229))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160817T163113_20160817T163138_012642_013D79_7947.zip;POLYGON ((29.187611 -23.469723,29.187611 -21.413755,32.014 -21.413755,32.014 -23.469723,29.187611 -23.469723))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160829T163023_20160829T163048_012817_014369_D1F6.zip;POLYGON ((29.985334 -26.468668,29.985334 -24.381828,32.885433 -24.381828,32.885433 -26.468668,29.985334 -26.468668))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160829T163048_20160829T163113_012817_014369_FAF8.zip;POLYGON ((29.576773 -24.968119,29.576773 -22.891933,32.450733 -22.891933,32.450733 -24.968119,29.576773 -24.968119))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160829T163113_20160829T163138_012817_014369_8797.zip;POLYGON ((29.188463 -23.469614,29.188463 -21.413363,32.014931 -21.413363,32.014931 -23.469614,29.188463 -23.469614))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160910T163024_20160910T163049_012992_01490B_C667.zip;POLYGON ((29.984932 -26.468599,29.984932 -24.381737,32.885052 -24.381737,32.885052 -26.468599,29.984932 -26.468599))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160910T163049_20160910T163114_012992_01490B_13AE.zip;POLYGON ((29.576347 -24.968056,29.576347 -22.89185,32.450321 -22.89185,32.450321 -24.968056,29.576347 -24.968056))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160910T163114_20160910T163139_012992_01490B_D4E1.zip;POLYGON ((29.188036 -23.469557,29.188036 -21.41337,32.014496 -21.41337,32.014496 -23.469557,29.188036 -23.469557))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160922T163024_20160922T163049_013167_014EE1_F9F3.zip;POLYGON ((29.984777 -26.468544,29.984777 
-24.381718,32.884811 -24.381718,32.884811 -26.468544,29.984777 -26.468544))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160922T163049_20160922T163114_013167_014EE1_FC22.zip;POLYGON ((29.576197 -24.968002,29.576197 -22.891827,32.450073 -22.891827,32.450073 -24.968002,29.576197 -24.968002))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20160922T163114_20160922T163139_013167_014EE1_4274.zip;POLYGON ((29.187891 -23.469503,29.187891 -21.413347,32.014256 -21.413347,32.014256 -23.469503,29.187891 -23.469503))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161004T163024_20161004T163049_013342_01546F_F4C0.zip;POLYGON ((29.984669 -26.468636,29.984669 -24.381775,32.884617 -24.381775,32.884617 -26.468636,29.984669 -26.468636))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161004T163049_20161004T163114_013342_01546F_F8D8.zip;POLYGON ((29.57613 -24.968,29.57613 -22.891973,32.449883 -22.891973,32.449883 -24.968,29.57613 -24.968))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161004T163114_20161004T163139_013342_01546F_D2BE.zip;POLYGON ((29.187819 -23.469591,29.187819 -21.413403,32.014103 -21.413403,32.014103 -23.469591,29.187819 -23.469591))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161016T163024_20161016T163049_013517_0159FA_01EC.zip;POLYGON ((29.984993 -26.468693,29.984993 -24.381842,32.884945 -24.381842,32.884945 -26.468693,29.984993 -26.468693))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161016T163049_20161016T163114_013517_0159FA_0346.zip;POLYGON ((29.576452 -24.96806,29.576452 -22.89204,32.450207 -22.89204,32.450207 -24.96806,29.576452 -24.96806))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161016T163114_20161016T163139_013517_0159FA_B71D.zip;POLYGON ((29.188097 -23.46965,29.188097 
-21.413294,32.014431 -21.413294,32.014431 -23.46965,29.188097 -23.46965))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161028T163024_20161028T163049_013692_015F62_AA59.zip;POLYGON ((29.984715 -26.468636,29.984715 -24.381773,32.884655 -24.381773,32.884655 -26.468636,29.984715 -26.468636))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161028T163049_20161028T163114_013692_015F62_D023.zip;POLYGON ((29.576159 -24.968,29.576159 -22.891975,32.449924 -22.891975,32.449924 -24.968,29.576159 -24.968))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161028T163114_20161028T163139_013692_015F62_3DA5.zip;POLYGON ((29.188122 -23.469595,29.188122 -21.414564,32.014133 -21.414564,32.014133 -23.469595,29.188122 -23.469595))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161109T163024_20161109T163049_013867_0164E7_7C9A.zip;POLYGON ((29.985243 -26.468487,29.985243 -24.381594,32.885204 -24.381594,32.885204 -26.468487,29.985243 -26.468487))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161109T163049_20161109T163114_013867_0164E7_E787.zip;POLYGON ((29.576633 -24.967857,29.576633 -22.891737,32.450344 -22.891737,32.450344 -24.967857,29.576633 -24.967857))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161109T163114_20161109T163139_013867_0164E7_586A.zip;POLYGON ((29.18852 -23.469368,29.18852 -21.414152,32.0145 -21.414152,32.0145 -23.469368,29.18852 -23.469368))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161121T163024_20161121T163049_014042_016A3C_4094.zip;POLYGON ((29.985079 -26.468414,29.985079 -24.381504,32.885059 -24.381504,32.885059 -26.468414,29.985079 -26.468414))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161121T163049_20161121T163114_014042_016A3C_E340.zip;POLYGON ((29.576477 -24.967791,29.576477 
-22.891739,32.450172 -22.891739,32.450172 -24.967791,29.576477 -24.967791))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161121T163114_20161121T163139_014042_016A3C_03C4.zip;POLYGON ((29.188238 -23.469393,29.188238 -21.413715,32.014336 -21.413715,32.014336 -23.469393,29.188238 -23.469393))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161203T163024_20161203T163049_014217_016FAE_6E2F.zip;POLYGON ((29.9846 -26.468592,29.9846 -24.381721,32.884563 -24.381721,32.884563 -26.468592,29.9846 -26.468592))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161203T163049_20161203T163114_014217_016FAE_9727.zip;POLYGON ((29.576002 -24.967964,29.576002 -22.891863,32.449711 -22.891863,32.449711 -24.967964,29.576002 -24.967964))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161203T163114_20161203T163138_014217_016FAE_73D0.zip;POLYGON ((29.187902 -23.469473,29.187902 -21.414278,32.013878 -21.414278,32.013878 -23.469473,29.187902 -23.469473))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161215T163023_20161215T163048_014392_01753D_4C47.zip;POLYGON ((29.984648 -26.46867,29.984648 -24.381847,32.884518 -24.381847,32.884518 -26.46867,29.984648 -26.46867))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161215T163048_20161215T163113_014392_01753D_F74D.zip;POLYGON ((29.576071 -24.96804,29.576071 -22.891964,32.449768 -22.891964,32.449768 -24.96804,29.576071 -24.96804))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161215T163113_20161215T163138_014392_01753D_269B.zip;POLYGON ((29.187859 -23.469547,29.187859 -21.413843,32.013954 -21.413843,32.013954 -23.469547,29.187859 -23.469547))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161227T163023_20161227T163048_014567_017AAC_2909.zip;POLYGON ((29.984375 
-26.46867,29.984375 -24.381844,32.884228 -24.381844,32.884228 -26.46867,29.984375 -26.46867))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161227T163048_20161227T163113_014567_017AAC_A217.zip;POLYGON ((29.575796 -24.968035,29.575796 -22.891958,32.449493 -22.891958,32.449493 -24.968035,29.575796 -24.968035))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20161227T163113_20161227T163138_014567_017AAC_4E22.zip;POLYGON ((29.187757 -23.469542,29.187757 -21.414547,32.01368 -21.414547,32.01368 -23.469542,29.187757 -23.469542))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170108T163021_20170108T163046_014742_017FF9_F4EE.zip;POLYGON ((29.985577 -26.468565,29.985577 -24.381708,32.885452 -24.381708,32.885452 -26.468565,29.985577 -26.468565))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170108T163046_20170108T163111_014742_017FF9_7C35.zip;POLYGON ((29.576971 -24.967937,29.576971 -22.891829,32.450684 -22.891829,32.450684 -24.967937,29.576971 -24.967937))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170108T163111_20170108T163136_014742_017FF9_EF1C.zip;POLYGON ((29.188799 -23.469448,29.188799 -21.413979,32.014839 -21.413979,32.014839 -23.469448,29.188799 -23.469448))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170120T163021_20170120T163046_014917_01857C_D353.zip;POLYGON ((29.985912 -26.468513,29.985912 -24.381607,32.885891 -24.381607,32.885891 -26.468513,29.985912 -26.468513))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170120T163046_20170120T163111_014917_01857C_91A9.zip;POLYGON ((29.577311 -24.96789,29.577311 -22.891817,32.451107 -22.891817,32.451107 -24.96789,29.577311 -24.96789))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170120T163111_20170120T163136_014917_01857C_E4AF.zip;POLYGON 
((29.189007 -23.469492,29.189007 -21.413528,32.015266 -21.413528,32.015266 -23.469492,29.189007 -23.469492))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170201T163021_20170201T163046_015092_018AC0_3B70.zip;POLYGON ((29.985559 -26.468489,29.985559 -24.381607,32.885441 -24.381607,32.885441 -26.468489,29.985559 -26.468489))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170201T163046_20170201T163111_015092_018AC0_B8F1.zip;POLYGON ((29.576963 -24.967865,29.576963 -22.891792,32.450752 -22.891792,32.450752 -24.967865,29.576963 -24.967865))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170201T163111_20170201T163135_015092_018AC0_0FB5.zip;POLYGON ((29.18873 -23.469465,29.18873 -21.413767,32.014919 -21.413767,32.014919 -23.469465,29.18873 -23.469465))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170213T163020_20170213T163045_015267_019044_B493.zip;POLYGON ((29.984644 -26.468693,29.984644 -24.38183,32.884598 -24.38183,32.884598 -26.468693,29.984644 -26.468693))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170213T163045_20170213T163110_015267_019044_B23C.zip;POLYGON ((29.576084 -24.96806,29.576084 -22.892033,32.449856 -22.892033,32.449856 -24.96806,29.576084 -24.96806))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170213T163110_20170213T163135_015267_019044_6BCD.zip;POLYGON ((29.18791 -23.469656,29.18791 -21.414091,32.014057 -21.414091,32.014057 -23.469656,29.18791 -23.469656))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170225T163020_20170225T163045_015442_019598_7700.zip;POLYGON ((29.984482 -26.468855,29.984482 -24.382048,32.884605 -24.382048,32.884605 -26.468855,29.984482 
-26.468855))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170225T163045_20170225T163110_015442_019598_0437.zip;POLYGON ((29.575914 -24.968309,29.575914 -22.892181,32.449799 -22.892181,32.449799 -24.968309,29.575914 -24.968309))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170225T163110_20170225T163135_015442_019598_C2A0.zip;POLYGON ((29.18771 -23.469812,29.18771 -21.414057,32.013988 -21.414057,32.013988 -23.469812,29.18771 -23.469812))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170309T163020_20170309T163045_015617_019AE8_CC54.zip;POLYGON ((29.984692 -26.46899,29.984692 -24.382055,32.88493 -24.382055,32.88493 -26.46899,29.984692 -26.46899))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170309T163045_20170309T163110_015617_019AE8_64E8.zip;POLYGON ((29.576143 -24.968353,29.576143 -22.892254,32.450195 -22.892254,32.450195 -24.968353,29.576143 -24.968353))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170309T163110_20170309T163135_015617_019AE8_0CB7.zip;POLYGON ((29.187937 -23.469944,29.187937 -21.414127,32.014404 -21.414127,32.014404 -23.469944,29.187937 -23.469944))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170321T163021_20170321T163046_015792_01A01B_D701.zip;POLYGON ((29.985121 -26.468891,29.985121 -24.381893,32.885452 -24.381893,32.885452 -26.468891,29.985121 -26.468891))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170321T163046_20170321T163111_015792_01A01B_437D.zip;POLYGON ((29.57655 -24.968254,29.57655 -22.892118,32.450611 -22.892118,32.450611 -24.968254,29.57655 -24.968254))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170321T163111_20170321T163135_015792_01A01B_DBA7.zip;POLYGON ((29.188499 -23.469849,29.188499 -21.414707,32.014801 -21.414707,32.014801 
-23.469849,29.188499 -23.469849))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170402T163021_20170402T163046_015967_01A54B_3BBE.zip;POLYGON ((29.984352 -26.469006,29.984352 -24.382078,32.884758 -24.382078,32.884758 -26.469006,29.984352 -26.469006))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170402T163046_20170402T163111_015967_01A54B_413E.zip;POLYGON ((29.575769 -24.968458,29.575769 -22.892191,32.450039 -22.892191,32.450039 -24.968458,29.575769 -24.968458))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170402T163111_20170402T163136_015967_01A54B_B061.zip;POLYGON ((29.187706 -23.469959,29.187706 -21.414686,32.01421 -21.414686,32.01421 -23.469959,29.187706 -23.469959))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170414T163022_20170414T163047_016142_01AAA1_5F9D.zip;POLYGON ((29.984194 -26.468998,29.984194 -24.38204,32.884422 -24.38204,32.884422 -26.468998,29.984194 -26.468998))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170414T163047_20170414T163112_016142_01AAA1_C6D1.zip;POLYGON ((29.575651 -24.968361,29.575651 -22.892235,32.449692 -22.892235,32.449692 -24.968361,29.575651 -24.968361))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170414T163112_20170414T163136_016142_01AAA1_AB82.zip;POLYGON ((29.187561 -23.469948,29.187561 -21.414551,32.013908 -21.414551,32.013908 -23.469948,29.187561 -23.469948))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170426T163022_20170426T163047_016317_01AFF8_82D7.zip;POLYGON ((29.984489 -26.46912,29.984489 -24.382181,32.884796 -24.382181,32.884796 -26.46912,29.984489 -26.46912))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170426T163047_20170426T163112_016317_01AFF8_0A75.zip;POLYGON ((29.575979 -24.968475,29.575979 -22.892397,32.450005 
-22.892397,32.450005 -24.968475,29.575979 -24.968475))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170426T163112_20170426T163137_016317_01AFF8_1D18.zip;POLYGON ((29.187876 -23.47006,29.187876 -21.41453,32.014252 -21.41453,32.014252 -23.47006,29.187876 -23.47006))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170508T163023_20170508T163048_016492_01B54B_08AE.zip;POLYGON ((29.983624 -26.468981,29.983624 -24.382042,32.883835 -24.382042,32.883835 -26.468981,29.983624 -26.468981))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170508T163048_20170508T163113_016492_01B54B_FAF3.zip;POLYGON ((29.5751 -24.968338,29.5751 -22.892233,32.449127 -22.892233,32.449127 -24.968338,29.5751 -24.968338))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170508T163113_20170508T163138_016492_01B54B_454F.zip;POLYGON ((29.186962 -23.469923,29.186962 -21.41428,32.013363 -21.41428,32.013363 -23.469923,29.186962 -23.469923))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170520T163023_20170520T163048_016667_01BAA4_3F32.zip;POLYGON ((29.984266 -26.468946,29.984266 -24.381985,32.884487 -24.381985,32.884487 -26.468946,29.984266 -26.468946))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170520T163048_20170520T163113_016667_01BAA4_22E7.zip;POLYGON ((29.575722 -24.968306,29.575722 -22.892181,32.44976 -22.892181,32.44976 -24.968306,29.575722 -24.968306))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170520T163113_20170520T163138_016667_01BAA4_202B.zip;POLYGON ((29.187695 -23.469893,29.187695 -21.414761,32.013977 -21.414761,32.013977 -23.469893,29.187695 -23.469893))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170601T163024_20170601T163049_016842_01C00A_341C.zip;POLYGON ((29.985277 -26.468801,29.985277 
-24.381817,32.885506 -24.381817,32.885506 -26.468801,29.985277 -26.468801))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170601T163049_20170601T163114_016842_01C00A_7B9F.zip;POLYGON ((29.576717 -24.968163,29.576717 -22.892014,32.450764 -22.892014,32.450764 -24.968163,29.576717 -24.968163))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170601T163114_20170601T163139_016842_01C00A_EBD7.zip;POLYGON ((29.18861 -23.469751,29.18861 -21.414331,32.014961 -21.414331,32.014961 -23.469751,29.18861 -23.469751))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170613T163025_20170613T163050_017017_01C578_78BC.zip;POLYGON ((29.984585 -26.468967,29.984585 -24.382019,32.884811 -24.382019,32.884811 -26.468967,29.984585 -26.468967))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170613T163050_20170613T163115_017017_01C578_551B.zip;POLYGON ((29.576048 -24.968327,29.576048 -22.892212,32.450085 -22.892212,32.450085 -24.968327,29.576048 -24.968327))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170613T163115_20170613T163140_017017_01C578_30F9.zip;POLYGON ((29.187786 -23.469912,29.187786 -21.413816,32.014309 -21.413816,32.014309 -23.469912,29.187786 -23.469912))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170625T163026_20170625T163051_017192_01CACF_3282.zip;POLYGON ((29.98415 -26.468987,29.98415 -24.382067,32.884357 -24.382067,32.884357 -26.468987,29.98415 -26.468987))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170625T163051_20170625T163116_017192_01CACF_DB92.zip;POLYGON ((29.57564 -24.968344,29.57564 -22.892256,32.449657 -22.892256,32.449657 -24.968344,29.57564 -24.968344))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170625T163116_20170625T163140_017192_01CACF_5DD7.zip;POLYGON ((29.187603 
-23.469925,29.187603 -21.414654,32.013908 -21.414654,32.013908 -23.469925,29.187603 -23.469925))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170707T163026_20170707T163051_017367_01D010_76B6.zip;POLYGON ((29.984552 -26.46908,29.984552 -24.382132,32.884838 -24.382132,32.884838 -26.46908,29.984552 -26.46908))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170707T163051_20170707T163116_017367_01D010_6A15.zip;POLYGON ((29.57604 -24.968431,29.57604 -22.892323,32.450157 -22.892323,32.450157 -24.968431,29.57604 -24.968431))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170707T163116_20170707T163141_017367_01D010_4839.zip;POLYGON ((29.188204 -23.470015,29.188204 -21.41552,32.014404 -21.41552,32.014404 -23.470015,29.188204 -23.470015))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170719T163027_20170719T163052_017542_01D56B_AB6F.zip;POLYGON ((29.984827 -26.468967,29.984827 -24.382053,32.88522 -24.382053,32.88522 -26.468967,29.984827 -26.468967))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170719T163052_20170719T163117_017542_01D56B_21FD.zip;POLYGON ((29.576263 -24.968416,29.576263 -22.892181,32.450424 -22.892181,32.450424 -24.968416,29.576263 -24.968416))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170719T163117_20170719T163141_017542_01D56B_B26B.zip;POLYGON ((29.188044 -23.469912,29.188044 -21.413963,32.014614 -21.413963,32.014614 -23.469912,29.188044 -23.469912))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170731T163028_20170731T163053_017717_01DAC5_D54D.zip;POLYGON ((29.984356 -26.468863,29.984356 -24.381903,32.884583 -24.381903,32.884583 -26.468863,29.984356 -26.468863))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170731T163053_20170731T163118_017717_01DAC5_7001.zip;POLYGON 
((29.575815 -24.968222,29.575815 -22.892099,32.449852 -22.892099,32.449852 -24.968222,29.575815 -24.968222))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170731T163118_20170731T163142_017717_01DAC5_F688.zip;POLYGON ((29.18755 -23.469809,29.18755 -21.413704,32.014069 -21.413704,32.014069 -23.469809,29.18755 -23.469809))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170812T163028_20170812T163053_017892_01E015_19BE.zip;POLYGON ((29.984343 -26.468819,29.984343 -24.381861,32.884575 -24.381861,32.884575 -26.468819,29.984343 -26.468819))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170812T163053_20170812T163118_017892_01E015_FBC4.zip;POLYGON ((29.575798 -24.968182,29.575798 -22.892056,32.449837 -22.892056,32.449837 -24.968182,29.575798 -24.968182))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170812T163118_20170812T163143_017892_01E015_2883.zip;POLYGON ((29.187506 -23.46977,29.187506 -21.413574,32.01405 -21.413574,32.01405 -23.46977,29.187506 -23.46977))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170824T163029_20170824T163054_018067_01E55F_E619.zip;POLYGON ((29.984594 -26.468813,29.984594 -24.382002,32.884686 -24.382002,32.884686 -26.468813,29.984594 -26.468813))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170824T163054_20170824T163119_018067_01E55F_4D7C.zip;POLYGON ((29.576059 -24.96826,29.576059 -22.892105,32.450005 -22.892105,32.450005 -24.96826,29.576059 -24.96826))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170824T163119_20170824T163144_018067_01E55F_B1D6.zip;POLYGON ((29.187778 -23.469753,29.187778 -21.413528,32.014229 -21.413528,32.014229 -23.469753,29.187778 
-23.469753))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170905T163029_20170905T163054_018242_01EAB3_1D72.zip;POLYGON ((29.984819 -26.468836,29.984819 -24.382025,32.884911 -24.382025,32.884911 -26.468836,29.984819 -26.468836))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170905T163054_20170905T163119_018242_01EAB3_824E.zip;POLYGON ((29.576281 -24.968283,29.576281 -22.892128,32.450233 -22.892128,32.450233 -24.968283,29.576281 -24.968283))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170905T163119_20170905T163144_018242_01EAB3_EE1F.zip;POLYGON ((29.188171 -23.469778,29.188171 -21.414263,32.01445 -21.414263,32.01445 -23.469778,29.188171 -23.469778))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170917T163030_20170917T163055_018417_01F027_71A4.zip;POLYGON ((29.984346 -26.46866,29.984346 -24.381832,32.884453 -24.381832,32.884453 -26.46866,29.984346 -26.46866))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170917T163055_20170917T163120_018417_01F027_29C4.zip;POLYGON ((29.575785 -24.968113,29.575785 -22.891966,32.449654 -22.891966,32.449654 -24.968113,29.575785 -24.968113))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170917T163120_20170917T163144_018417_01F027_DD2F.zip;POLYGON ((29.187565 -23.469612,29.187565 -21.41375,32.013851 -21.41375,32.013851 -23.469612,29.187565 -23.469612))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170929T163030_20170929T163055_018592_01F57E_8F90.zip;POLYGON ((29.98476 -26.468548,29.98476 -24.381704,32.884789 -24.381704,32.884789 -26.468548,29.98476 -26.468548))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170929T163055_20170929T163120_018592_01F57E_413B.zip;POLYGON ((29.57616 -24.968006,29.57616 -22.891819,32.45005 -22.891819,32.45005 
-24.968006,29.57616 -24.968006))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20170929T163120_20170929T163145_018592_01F57E_C4BD.zip;POLYGON ((29.187971 -23.469511,29.187971 -21.413853,32.014305 -21.413853,32.014305 -23.469511,29.187971 -23.469511))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171011T163030_20171011T163055_018767_01FACE_D9A6.zip;POLYGON ((29.984995 -26.468466,29.984995 -24.381641,32.884941 -24.381641,32.884941 -26.468466,29.984995 -26.468466))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171011T163055_20171011T163120_018767_01FACE_7F3F.zip;POLYGON ((29.576385 -24.967926,29.576385 -22.891733,32.450283 -22.891733,32.450283 -24.967926,29.576385 -24.967926))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171011T163120_20171011T163145_018767_01FACE_C785.zip;POLYGON ((29.188074 -23.469435,29.188074 -21.413349,32.014435 -21.413349,32.014435 -23.469435,29.188074 -23.469435))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171023T163030_20171023T163055_018942_020030_064B.zip;POLYGON ((29.984623 -26.468557,29.984623 -24.381655,32.88467 -24.381655,32.88467 -26.468557,29.984623 -26.468557))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171023T163055_20171023T163120_018942_020030_4F4D.zip;POLYGON ((29.576059 -24.967924,29.576059 -22.89188,32.449825 -22.89188,32.449825 -24.967924,29.576059 -24.967924))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171023T163120_20171023T163145_018942_020030_189E.zip;POLYGON ((29.187881 -23.469519,29.187881 -21.413937,32.014027 -21.413937,32.014027 -23.469519,29.187881 -23.469519))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171104T163030_20171104T163055_019117_020586_BC0C.zip;POLYGON ((29.984682 -26.468666,29.984682 -24.381824,32.884624 
-24.381824,32.884624 -26.468666,29.984682 -26.468666))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171104T163055_20171104T163120_019117_020586_6B3E.zip;POLYGON ((29.576143 -24.968031,29.576143 -22.892023,32.449902 -22.892023,32.449902 -24.968031,29.576143 -24.968031))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171104T163120_20171104T163145_019117_020586_252B.zip;POLYGON ((29.188166 -23.469624,29.188166 -21.414787,32.014122 -21.414787,32.014122 -23.469624,29.188166 -23.469624))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171116T163030_20171116T163055_019292_020AFB_FCF0.zip;POLYGON ((29.985197 -26.468575,29.985197 -24.381708,32.885162 -24.381708,32.885162 -26.468575,29.985197 -26.468575))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171116T163055_20171116T163120_019292_020AFB_2E25.zip;POLYGON ((29.576632 -24.967945,29.576632 -22.891912,32.450405 -22.891912,32.450405 -24.967945,29.576632 -24.967945))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171116T163120_20171116T163145_019292_020AFB_24EF.zip;POLYGON ((29.188452 -23.469542,29.188452 -21.413971,32.014603 -21.413971,32.014603 -23.469542,29.188452 -23.469542))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171128T163030_20171128T163055_019467_021084_59D7.zip;POLYGON ((29.985559 -26.468468,29.985559 -24.381567,32.885532 -24.381567,32.885532 -26.468468,29.985559 -26.468468))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171128T163055_20171128T163120_019467_021084_D64B.zip;POLYGON ((29.576941 -24.967842,29.576941 -22.891712,32.450657 -22.891712,32.450657 -24.967842,29.576941 -24.967842))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171128T163120_20171128T163144_019467_021084_B29D.zip;POLYGON ((29.188818 -23.469355,29.188818 
-21.414129,32.014801 -21.414129,32.014801 -23.469355,29.188818 -23.469355))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171210T163029_20171210T163054_019642_0215F9_1239.zip;POLYGON ((29.985878 -26.468428,29.985878 -24.381552,32.885765 -24.381552,32.885765 -26.468428,29.985878 -26.468428))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171210T163054_20171210T163119_019642_0215F9_DFE3.zip;POLYGON ((29.577259 -24.967804,29.577259 -22.891674,32.450977 -22.891674,32.450977 -24.967804,29.577259 -24.967804))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171210T163119_20171210T163144_019642_0215F9_3C36.zip;POLYGON ((29.189093 -23.469318,29.189093 -21.413916,32.015118 -21.413916,32.015118 -23.469318,29.189093 -23.469318))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171222T163028_20171222T163053_019817_021B5B_628A.zip;POLYGON ((29.985939 -26.468563,29.985939 -24.381695,32.885906 -24.381695,32.885906 -26.468563,29.985939 -26.468563))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171222T163053_20171222T163118_019817_021B5B_5883.zip;POLYGON ((29.577343 -24.967936,29.577343 -22.891836,32.45105 -22.891836,32.45105 -24.967936,29.577343 -24.967936))\nS1A;IW;VV,VH;/geonfs02_vol1/c4bech/01_sentinel/09_KNP/01_data/S1A_IW_GRDH_1SDV_20171222T163118_20171222T163143_019817_021B5B_9DF6.zip;POLYGON ((29.189045 -23.469444,29.189045 -21.413454,32.015217 -21.413454,32.015217 -23.469444,29.189045 -23.469444))\n"
  },
  {
    "path": "tests/data/dem.par",
    "content": "Gamma DIFF&GEO DEM/MAP parameter file\ntitle: alps_dem_gamma_SRTM-1Sec-HGT\nDEM_projection:     UTM\ndata_format:        REAL*4\nDEM_hgt_offset:          0.00000\nDEM_scale:               1.00000\nwidth:                5927\nnlines:               6455\ncorner_north:  5235158.873   m\ncorner_east:    515363.565   m\npost_north:    -20.0000000   m\npost_east:      20.0000000   m\n\nellipsoid_name: WGS 84\nellipsoid_ra:        6378137.000   m\nellipsoid_reciprocal_flattening:  298.2572236\n\ndatum_name: WGS 1984\ndatum_shift_dx:              0.000   m\ndatum_shift_dy:              0.000   m\ndatum_shift_dz:              0.000   m\ndatum_scale_m:         0.00000e+00\ndatum_rotation_alpha:  0.00000e+00   arc-sec\ndatum_rotation_beta:   0.00000e+00   arc-sec\ndatum_rotation_gamma:  0.00000e+00   arc-sec\ndatum_country_list: Global Definition, WGS84, World\n\nprojection_name: UTM\nprojection_zone:                 32\nfalse_easting:           500000.000   m\nfalse_northing:               0.000   m\nprojection_k0:            0.9996000\ncenter_longitude:         9.0000000   decimal degrees\ncenter_latitude:          0.0000000   decimal degrees\n\n"
  },
  {
    "path": "tests/data/mli.par",
    "content": "Gamma Interferometric SAR Processor (ISP) - Image Parameter File\n\ntitle:     S1A-IW-IW-VV-3296 (software: Sentinel-1 IPF 002.36)\nsensor:    S1A IW IW VV\ndate:      2014 11 15 18 18 1.3091\nstart_time:             65881.309050   s\ncenter_time:            65893.808110   s\nend_time:               65906.307169   s\nazimuth_line_time:     1.4954606e-03   s\nline_header_size:                  0\nrange_samples:                 31897\nazimuth_lines:                 16717\nrange_looks:                       5\nazimuth_looks:                     1\nimage_format:               FLOAT\nimage_geometry:             SLANT_RANGE\nrange_scale_factor:     1.0000000e+00\nazimuth_scale_factor:   1.0000000e+00\ncenter_latitude:          37.1843743   degrees\ncenter_longitude:         -4.8856155   degrees\nheading:                 -13.1840654   degrees\nrange_pixel_spacing:        5.000000   m\nazimuth_pixel_spacing:     10.003560   m\nnear_range_slc:           800286.7867  m\ncenter_range_slc:         880026.7867  m\nfar_range_slc:            959766.7867  m\nfirst_slant_range_polynomial:        0.00000      0.00000  0.00000e+00  0.00000e+00  0.00000e+00  0.00000e+00  s m 1 m^-1 m^-2 m^-3 \ncenter_slant_range_polynomial:       0.00000      0.00000  0.00000e+00  0.00000e+00  0.00000e+00  0.00000e+00  s m 1 m^-1 m^-2 m^-3 \nlast_slant_range_polynomial:         0.00000      0.00000  0.00000e+00  0.00000e+00  0.00000e+00  0.00000e+00  s m 1 m^-1 m^-2 m^-3 \nincidence_angle:             38.9988   degrees\nazimuth_deskew:          ON\nazimuth_angle:               90.0000   degrees\nradar_frequency:        5.4050005e+09  Hz\nadc_sampling_rate:      6.4345238e+07  Hz\nchirp_bandwidth:        5.6500000e+07  Hz\nprf:                      668.6903123  Hz\nazimuth_proc_bandwidth:     327.00000  Hz\ndoppler_polynomial:        -11.16383 -6.56197e-05  7.59030e-10  0.00000e+00  Hz     Hz/m     Hz/m^2     Hz/m^3\ndoppler_poly_dot:        0.00000e+00  0.00000e+00  0.00000e+00  
0.00000e+00  Hz/s   Hz/s/m   Hz/s/m^2   Hz/s/m^3\ndoppler_poly_ddot:       0.00000e+00  0.00000e+00  0.00000e+00  0.00000e+00  Hz/s^2 Hz/s^2/m Hz/s^2/m^2 Hz/s^2/m^3\nreceiver_gain:                 0.0000  dB\ncalibration_gain:              0.0000  dB\nsar_to_earth_center:             7070852.9213   m\nearth_radius_below_sensor:       6370467.2353   m\nearth_semi_major_axis:           6378137.0000   m\nearth_semi_minor_axis:           6356752.3141   m\nnumber_of_state_vectors:                   15\ntime_of_first_state_vector:      65824.000000   s\nstate_vector_interval:              10.000000   s\nstate_vector_position_1:   5924711.4180    -966420.2665    3737790.5072   m   m   m\nstate_vector_velocity_1:    -4144.02916     -1043.28697      6279.00570   m/s m/s m/s\nstate_vector_position_2:   5882932.1764    -976768.3769    3800368.2816   m   m   m\nstate_vector_velocity_2:    -4211.73676     -1026.29936      6236.43151   m/s m/s m/s\nstate_vector_position_3:   5840478.3441    -986945.5426    3862516.7916   m   m   m\nstate_vector_velocity_3:    -4278.94597     -1009.09848      6193.15365   m/s m/s m/s\nstate_vector_position_4:   5797354.9443    -996949.6443    3924229.0253   m   m   m\nstate_vector_velocity_4:    -4345.64895      -991.68701      6149.17708   m/s m/s m/s\nstate_vector_position_5:   5753567.0788   -1006778.5897    3985498.0206   m   m   m\nstate_vector_velocity_5:    -4411.83787      -974.06767      6104.50682   m/s m/s m/s\nstate_vector_position_6:   5709119.9266   -1016430.3138    4046316.8661   m   m   m\nstate_vector_velocity_6:    -4477.50500      -956.24320      6059.14797   m/s m/s m/s\nstate_vector_position_7:   5664018.7442   -1025902.7791    4106678.7017   m   m   m\nstate_vector_velocity_7:    -4542.64266      -938.21637      6013.10570   m/s m/s m/s\nstate_vector_position_8:   5618268.8641   -1035193.9760    4166576.7197   m   m   m\nstate_vector_velocity_8:    -4607.24325      -919.99000      5966.38528   m/s m/s 
m/s\nstate_vector_position_9:   5571875.6949   -1044301.9234    4226004.1649   m   m   m\nstate_vector_velocity_9:    -4671.29921      -901.56693      5918.99202   m/s m/s m/s\nstate_vector_position_10:   5524844.7202   -1053224.6686    4284954.3358   m   m   m\nstate_vector_velocity_10:    -4734.80307      -882.95004      5870.93135   m/s m/s m/s\nstate_vector_position_11:   5477181.4982   -1061960.2877    4343420.5855   m   m   m\nstate_vector_velocity_11:    -4797.74742      -864.14223      5822.20873   m/s m/s m/s\nstate_vector_position_12:   5428891.6610   -1070506.8864    4401396.3223   m   m   m\nstate_vector_velocity_12:    -4860.12490      -845.14645      5772.82972   m/s m/s m/s\nstate_vector_position_13:   5379980.9136   -1078862.5998    4458875.0103   m   m   m\nstate_vector_velocity_13:    -4921.92827      -825.96568      5722.79995   m/s m/s m/s\nstate_vector_position_14:   5330455.0334   -1087025.5930    4515850.1705   m   m   m\nstate_vector_velocity_14:    -4983.15030      -806.60292      5672.12514   m/s m/s m/s\nstate_vector_position_15:   5280319.8694   -1094994.0613    4572315.3816   m   m   m\nstate_vector_velocity_15:    -5043.78388      -787.06120      5620.81106   m/s m/s m/s\n\n"
  },
  {
    "path": "tests/installtest_gdal_geos.py",
    "content": "from osgeo import ogr\n\n\n# test whether GDAL was successfully built with GEOS support\nwkt1 = 'POLYGON ((' \\\n       '1208064.271243039 624154.6783778917, ' \\\n       '1208064.271243039 601260.9785661874, ' \\\n       '1231345.9998651114 601260.9785661874, ' \\\n       '1231345.9998651114 624154.6783778917, ' \\\n       '1208064.271243039 624154.6783778917' \\\n       '))'\n\nwkt2 = 'POLYGON ((' \\\n       '1199915.6662253144 633079.3410163528, ' \\\n       '1199915.6662253144 614453.958118695, ' \\\n       '1219317.1067437078 614453.958118695, ' \\\n       '1219317.1067437078 633079.3410163528, ' \\\n       '1199915.6662253144 633079.3410163528' \\\n       '))'\n\npoly1 = ogr.CreateGeometryFromWkt(wkt1)\npoly2 = ogr.CreateGeometryFromWkt(wkt2)\n\nintersection = poly1.Intersection(poly2)\n"
  },
  {
    "path": "tests/installtest_ogr_sqlite.py",
    "content": "from osgeo import ogr\n\ndriver = ogr.GetDriverByName('SQLite')\n\nif driver is None:\n    raise RuntimeError('OGR was built without SQLite driver')\n"
  },
  {
    "path": "tests/installtest_spatialite.py",
    "content": "\ntry:\n    from pysqlite2 import dbapi2 as sqlite3\nexcept ImportError:\n    import sqlite3\n\nprint(sqlite3.__file__)\n\ncon = sqlite3.connect(':memory:')\n\ncon.enable_load_extension(True)\n\ntry:\n    con.load_extension('mod_spatialite')\nexcept sqlite3.OperationalError:\n    con.load_extension('libspatialite')\n"
  },
  {
    "path": "tests/test_ancillary.py",
    "content": "import os\nimport pytest\nimport datetime\nfrom pathlib import Path\nfrom pyroSAR.ancillary import (seconds, groupbyTime, groupby,\n                               parse_datasetname, find_datasets,\n                               Lock, LockCollection)\n\n\ndef test_seconds():\n    assert seconds('test_20151212T234411') == 3658952651.0\n\n\ndef test_groupby():\n    \"\"\"\n    Test correct grouping of filenames by their attributes\n    Methodology is to provide a list of partially overlapping filenames\n    and ensure the resultant list of lists contains the correct entry numbers\n    \"\"\"\n    filenames = ['S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif',\n                 'S1A__IW___A_20150309T173017_HH_grd_mli_geo_norm_db.tif',\n                 'S2A__IW___A_20180309T173017_HH_grd_mli_geo_norm_db.tif']\n    sensor_groups = groupby(filenames, 'sensor')\n    print(sensor_groups)\n    assert len(sensor_groups) == 2\n    assert isinstance(sensor_groups[0], list)\n    assert len(sensor_groups[0]) == 2\n    \n    filenames += ['S2A__IW___A_20180309T173017_VV_grd_mli_geo_norm_db.tif']\n    \n    polarization_groups = groupby(filenames, 'polarization')\n    print(polarization_groups)\n    assert len(polarization_groups) == 2\n    assert isinstance(polarization_groups[0], list)\n    assert isinstance(polarization_groups[1], list)\n    assert len(polarization_groups[0]) == 2\n    assert len(polarization_groups[1]) == 2\n    \n    filenames += ['S2A__IW___A_20180309T173017_HV_grd_mli_geo_norm_db.tif']\n    \n    polarization_groups = groupby(filenames, 'polarization')\n    print(polarization_groups)\n    assert len(polarization_groups) == 3\n    assert isinstance(polarization_groups[0], list)\n    assert isinstance(polarization_groups[1], list)\n    assert isinstance(polarization_groups[2], list)\n    assert len(polarization_groups[0]) == 2\n    assert len(polarization_groups[1]) == 1\n    assert len(polarization_groups[2]) == 2\n\n\ndef 
test_groupbyTime():\n    filenames = ['S1__IW___A_20151212T120000',\n                 'S1__IW___A_20151212T120100',\n                 'S1__IW___A_20151212T120300']\n    groups = groupbyTime(filenames, seconds, 60)\n    print(groups)\n    assert len(groups) == 2\n    assert isinstance(groups[0], list)\n    assert len(groups[0]) == 2\n    \n    filenames = ['S1__IW___A_20151212T120000',\n                 'S1__IW___A_20151212T120100',\n                 'S1__IW___A_20151212T120200']\n    groups = groupbyTime(filenames, seconds, 60)\n    print(groups)\n    assert len(groups[0]) == 3\n\n\ndef test_parse_datasetname():\n    assert parse_datasetname('foobar') is None\n    filename = 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'\n    meta = parse_datasetname(filename, parse_date=True)\n    assert sorted(meta.keys()) == ['acquisition_mode', 'extensions', 'filename',\n                                   'filetype', 'orbit', 'outname_base',\n                                   'polarization', 'proc_steps', 'sensor', 'start']\n    assert meta['acquisition_mode'] == 'IW'\n    assert meta['extensions'] is None\n    assert meta['filename'] == filename\n    assert meta['orbit'] == 'A'\n    assert meta['outname_base'] == 'S1A__IW___A_20150309T173017'\n    assert meta['polarization'] == 'VV'\n    assert meta['proc_steps'] == ['grd', 'mli', 'geo', 'norm', 'db']\n    assert meta['sensor'] == 'S1A'\n    assert meta['start'] == datetime.datetime(2015, 3, 9, 17, 30, 17)\n    meta = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd.tif')\n    assert meta['proc_steps'] == ['grd']\n    \n    meta1 = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif')\n    meta2 = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db')\n    meta3 = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.nc')\n    \n    assert meta1['filetype'] == '.tif'\n    assert meta2['filetype'] == ''\n    assert meta3['filetype'] == '.nc'\n    \n    for k 
in meta1.keys():\n        if k not in ['filename', 'filetype']:\n            assert meta1[k] == meta2[k]\n            assert meta1[k] == meta3[k]\n    \n    filename = 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'\n    expectation = {'outname_base': 'S1A__IW___A_20150309T173017', 'sensor': 'S1A', 'acquisition_mode': 'IW',\n                   'orbit': 'A', 'start': '20150309T173017', 'extensions': None, 'polarization': 'VV',\n                   'proc_steps': ['grd', 'mli', 'geo', 'norm', 'db'], 'filetype': '.tif',\n                   'filename': 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'}\n    assert parse_datasetname(filename) == expectation\n    \n    filename = 'S1A__IW___A_20150309T173017_149_abc_VV_grd_mli_geo_norm_db.tif'\n    expectation = {'outname_base': 'S1A__IW___A_20150309T173017_149_abc', 'sensor': 'S1A', 'acquisition_mode': 'IW',\n                   'orbit': 'A', 'start': '20150309T173017', 'extensions': '149_abc', 'polarization': 'VV',\n                   'proc_steps': ['grd', 'mli', 'geo', 'norm', 'db'], 'filetype': '.tif',\n                   'filename': 'S1A__IW___A_20150309T173017_149_abc_VV_grd_mli_geo_norm_db.tif'}\n    assert parse_datasetname(filename) == expectation\n    \n    filename = 'S1A__IW___A_20150309T173017_149_inc_geo.tif'\n    expectation = {'outname_base': 'S1A__IW___A_20150309T173017_149_inc_geo', 'sensor': 'S1A', 'acquisition_mode': 'IW',\n                   'orbit': 'A', 'start': '20150309T173017', 'extensions': '149_inc_geo', 'polarization': None,\n                   'proc_steps': None, 'filetype': '.tif', 'filename': 'S1A__IW___A_20150309T173017_149_inc_geo.tif'}\n    assert parse_datasetname(filename) == expectation\n\n\ndef test_find_datasets(testdir):\n    assert len(find_datasets(testdir, sensor='S1A')) == 1\n    assert len(find_datasets(testdir, sensor='S1B')) == 0\n\n\ndef test_lock(tmpdir):\n    f1 = str(tmpdir / 'test1.txt')\n    f2 = str(tmpdir / 'test2.txt')\n    Path(f1).touch()\n    
Path(f2).touch()\n    \n    # simple nested write-locking\n    with Lock(f1):\n        with Lock(f1):\n            assert os.path.isfile(f1 + '.lock')\n        assert os.path.isfile(f1 + '.lock')\n    assert not os.path.isfile(f1 + '.lock')\n    \n    # simple nested read-locking\n    with Lock(f1, soft=True) as l1:\n        used = l1.used\n        with Lock(f1, soft=True):\n            assert os.path.isfile(used)\n        assert os.path.isfile(used)\n    assert not os.path.isfile(used)\n    \n    # separate instances for different files\n    with Lock(f1):\n        with Lock(f2):\n            assert os.path.isfile(f2 + '.lock')\n        assert os.path.isfile(f1 + '.lock')\n    \n    # combination of nested locking, multiple instances, and LockCollection\n    with LockCollection([f1, f2]):\n        with LockCollection([f1, f2]):\n            assert os.path.isfile(f1 + '.lock')\n            assert os.path.isfile(f2 + '.lock')\n        with Lock(f2):\n            assert os.path.isfile(f1 + '.lock')\n            assert os.path.isfile(f2 + '.lock')\n        assert os.path.isfile(f1 + '.lock')\n        assert os.path.isfile(f2 + '.lock')\n    assert not os.path.isfile(f1 + '.lock')\n    assert not os.path.isfile(f2 + '.lock')\n    \n    # nested locking does not work if the `soft` argument changes\n    with Lock(f1):\n        with pytest.raises(RuntimeError):\n            with Lock(f1, soft=True):\n                assert os.path.isfile(f1 + '.lock')\n    \n    # cannot nest write-lock in read-lock\n    with Lock(f1, soft=True):\n        with pytest.raises(RuntimeError):\n            with Lock(f1):\n                assert os.path.isfile(f1 + '.lock')\n    \n    # not using the context manager requires manual lock removal\n    lock = Lock(f1)\n    try:\n        raise RuntimeError\n    except RuntimeError as e:\n        lock.remove(exc_type=type(e))\n    assert os.path.isfile(f1 + '.error')\n    # cannot acquire lock on a damaged target\n    with 
pytest.raises(RuntimeError):\n        lock = Lock(f1)\n    os.remove(f1 + '.error')\n    \n    # cannot acquire lock if there is a (simulated) lock\n    Path(f1 + '.lock').touch()\n    with pytest.raises(RuntimeError):\n        lock2 = Lock(f1, timeout=5)\n    os.remove(f1 + '.lock')\n"
  },
  {
    "path": "tests/test_archive.py",
    "content": "import os\n\nfrom sqlalchemy import Table, MetaData, Column, Integer, String\nfrom geoalchemy2 import Geometry\n\nfrom pyroSAR.drivers import identify\nfrom pyroSAR.archive import Archive, drop_archive\n\nfrom spatialist.vector import Vector\n\nfrom shapely import wkt\n\nimport pytest\n\nmetadata = MetaData()\n\nmytable = Table('mytable', metadata,\n                Column('mytable_id', Integer, primary_key=True),\n                Column('value', String(50)),\n                Column('shape', Geometry(geometry_type='POLYGON',\n                                         management=True, srid=4326)))\n\n\ndef test_archive(tmpdir, testdata):\n    id = identify(testdata['s1'])\n    dbfile = os.path.join(str(tmpdir), 'scenes.db')\n    db = Archive(dbfile)\n    db.insert(testdata['s1'])\n    assert all(isinstance(x, str) for x in db.get_tablenames())\n    assert all(isinstance(x, str) for x in db.get_colnames())\n    assert db.is_registered(testdata['s1']) is True\n    assert len(db.get_unique_directories()) == 1\n    assert db.select_duplicates() == []\n    assert db.select_duplicates(outname_base='S1A__IW___A_20150222T170750', scene='scene.zip') == []\n    assert len(db.select(mindate='20141001T192312', maxdate='20201001T192312')) == 1\n    assert len(db.select(polarizations=['VV'])) == 1\n    assert len(db.select(vectorobject=id.bbox())) == 1\n    assert len(db.select(sensor='S1A', vectorobject='foo', processdir=str(tmpdir))) == 1\n    assert len(db.select(sensor='S1A', mindate='foo', maxdate='bar', foobar='foobar')) == 1\n    out = db.select(vv=1, acquisition_mode=['IW', 'EW'])\n    assert len(out) == 1\n    assert isinstance(out[0], str)\n    \n    out = db.select(vv=1, return_value=['mindate', 'geometry_wkt', 'geometry_wkb'])\n    assert len(out) == 1\n    assert isinstance(out[0], tuple)\n    assert out[0][0] == '20150222T170750'\n    geom = wkt.loads('POLYGON(('\n                     '8.505644 50.295261, 12.0268 50.688881, '\n                     
'11.653832 52.183979, 8.017178 51.788181, '\n                     '8.505644 50.295261))')\n    assert wkt.loads(out[0][1]) == geom\n    assert out[0][2] == geom.wkb\n    \n    with pytest.raises(ValueError):\n        out = db.select(vv=1, return_value=['foobar'])\n    \n    db.insert(testdata['s1_3'])\n    db.insert(testdata['s1_4'])\n    db.drop_element(testdata['s1_3'])\n    assert db.size == (2, 0)\n    db.drop_element(testdata['s1_4'])\n    \n    db.add_tables(mytable)\n    assert 'mytable' in db.get_tablenames()\n    with pytest.raises(TypeError):\n        db.filter_scenelist([1])\n    db.close()\n\n\ndef test_archive2(tmpdir, testdata):\n    dbfile = os.path.join(str(tmpdir), 'scenes.db')\n    with Archive(dbfile) as db:\n        db.insert(testdata['s1'])\n        assert db.size == (1, 0)\n        shp = os.path.join(str(tmpdir), 'db.shp')\n        db.export2shp(shp)\n    \n    os.remove(dbfile)\n    assert not os.path.isfile(dbfile)\n    assert Vector(shp).nfeatures == 1\n    \n    with Archive(dbfile) as db:\n        with pytest.raises(OSError):\n            db.import_outdated(testdata['archive_old_csv'])\n        with pytest.raises(RuntimeError):\n            db.import_outdated('foobar')\n    \n    # the archive_old_bbox database contains a relative file name for the scene\n    # so that it can be reimported into the new database. 
The working directory\n    # is changed temporarily so that the scene can be found.\n    cwd = os.getcwd()\n    folder = os.path.dirname(os.path.realpath(__file__))\n    os.chdir(os.path.join(folder, 'data'))\n    with Archive(dbfile) as db:\n        with Archive(testdata['archive_old_bbox'], legacy=True) as db_old:\n            db.import_outdated(db_old)\n    os.chdir(cwd)\n    \n    with pytest.raises(RuntimeError):\n        db = Archive(testdata['archive_old_csv'])\n    with pytest.raises(RuntimeError):\n        db = Archive(testdata['archive_old_bbox'])\n\n\ndef test_archive_postgres(tmpdir, testdata):\n    pguser = os.environ.get('PGUSER')\n    pgpassword = os.environ.get('PGPASSWORD')\n    pgport = os.environ.get('PGPORT')\n    if pgport is not None:\n        pgport = int(pgport)\n    else:\n        pgport = 5432\n    \n    id = identify(testdata['s1'])\n    db = Archive('test', postgres=True, port=pgport, user=pguser, password=pgpassword)\n    db.insert(testdata['s1'])\n    assert all(isinstance(x, str) for x in db.get_tablenames())\n    assert all(isinstance(x, str) for x in db.get_colnames())\n    assert db.is_registered(testdata['s1']) is True\n    assert len(db.get_unique_directories()) == 1\n    assert db.select_duplicates() == []\n    assert db.select_duplicates(outname_base='S1A__IW___A_20150222T170750', scene='scene.zip') == []\n    assert len(db.select(mindate='20141001T192312', maxdate='20201001T192312')) == 1\n    assert len(db.select(polarizations=['VV'])) == 1\n    assert len(db.select(vectorobject=id.bbox())) == 1\n    assert len(db.select(sensor='S1A', vectorobject='foo', processdir=str(tmpdir))) == 1\n    assert len(db.select(sensor='S1A', mindate='foo', maxdate='bar', foobar='foobar')) == 1\n    out = db.select(vv=1, acquisition_mode=('IW', 'EW'))\n    assert len(out) == 1\n    assert isinstance(out[0], str)\n    \n    out = db.select(vv=1, return_value=['scene', 'start'])\n    assert len(out) == 1\n    assert isinstance(out[0], tuple)\n    
assert out[0][1] == '20150222T170750'\n    \n    with pytest.raises(ValueError):\n        out = db.select(vv=1, return_value=['foobar'])\n    \n    db.add_tables(mytable)\n    assert 'mytable' in db.get_tablenames()\n    with pytest.raises(TypeError):\n        db.filter_scenelist([1])\n    db.close()\n    with Archive('test', postgres=True, port=pgport,\n                 user=pguser, password=pgpassword) as db:\n        assert db.size == (1, 0)\n        shp = os.path.join(str(tmpdir), 'db.shp')\n        db.export2shp(shp)\n        drop_archive(db)\n    assert Vector(shp).nfeatures == 1\n    \n    with Archive('test', postgres=True, port=pgport,\n                 user=pguser, password=pgpassword) as db:\n        with pytest.raises(OSError):\n            db.import_outdated(testdata['archive_old_csv'])\n        drop_archive(db)\n    \n    # the archive_old_bbox database contains a relative file name for the scene\n    # so that it can be reimported into the new database. The working directory\n    # is changed temporarily so that the scene can be found.\n    cwd = os.getcwd()\n    folder = os.path.dirname(os.path.realpath(__file__))\n    os.chdir(os.path.join(folder, 'data'))\n    with Archive('test', postgres=True, port=pgport,\n                 user=pguser, password=pgpassword) as db:\n        with Archive(testdata['archive_old_bbox'], legacy=True) as db_old:\n            db.import_outdated(db_old)\n        drop_archive(db)\n    os.chdir(cwd)\n    \n    dbfile = os.path.join(str(tmpdir), 'scenes.db')\n    with Archive('test', postgres=True, port=pgport,\n                 user=pguser, password=pgpassword) as db:\n        with Archive(dbfile, legacy=True) as db_sqlite:\n            db.import_outdated(db_sqlite)\n        drop_archive(db)\n    \n    with pytest.raises(SystemExit) as pytest_wrapped_e:\n        Archive('test', postgres=True, user='hello_world', port=7080)\n    assert pytest_wrapped_e.type == SystemExit\n"
  },
  {
    "path": "tests/test_auxdata.py",
    "content": "import os\nimport pytest\nfrom pyroSAR.auxdata import dem_autoload, DEMHandler, dem_create\n\nfrom spatialist import bbox\n\n\ndef test_handler(auxdata_dem_cases):\n    with bbox({'xmin': 11.5, 'xmax': 11.9, 'ymin': 51.1, 'ymax': 51.5}, crs=4326) as box:\n        with DEMHandler([box]) as handler:\n            assert isinstance(handler.auxdatapath, str)\n            for demType, reference in auxdata_dem_cases:\n                result = handler.remote_ids(dem_type=demType, extent=box.extent)\n                assert result == reference\n    \n    with bbox({'xmin': -58.9, 'xmax': -58.5, 'ymin': -51.5, 'ymax': -51.1}, crs=4326) as box:\n        with DEMHandler([box]) as handler:\n            cases = [('AW3D30', ['S055W060/S052W059.tar.gz']),\n                     ('SRTM 1Sec HGT', ['https://step.esa.int/auxdata/dem/SRTMGL1/S52W059.SRTMGL1.hgt.zip']),\n                     ('SRTM 3Sec', ['https://step.esa.int/auxdata/dem/SRTM90/tiff/srtm_25_23.zip'])\n                     ]\n            for demType, reference in cases:\n                result = handler.remote_ids(dem_type=demType, extent=box.extent)\n                assert result == reference\n    with pytest.raises(RuntimeError):\n        test = DEMHandler('foobar')\n    ext_utm = {'xmin': -955867, 'xmax': -915536, 'ymin': -5915518, 'ymax': -5863678}\n    with bbox(ext_utm, crs=32632) as box:\n        with pytest.raises(RuntimeError):\n            test = DEMHandler([box])\n\n\ndef test_autoload(auxdata_dem_cases, travis):\n    with bbox({'xmin': 11.5, 'xmax': 11.9, 'ymin': 51, 'ymax': 51.5}, crs=4326) as box:\n        # if the following is run in a loop, it is not possible to see which demType failed\n        # Travis CI does not support ftp access;\n        # see https://blog.travis-ci.com/2018-07-23-the-tale-of-ftp-at-travis-ci\n        if not travis:\n            files = dem_autoload([box], 'AW3D30')\n            assert len(files) == 1\n            files = dem_autoload([box], 'AW3D30', 
product='stk')\n            assert len(files) == 1\n        files = dem_autoload([box], 'SRTM 1Sec HGT')\n        assert len(files) == 1\n        files = dem_autoload([box], 'SRTM 1Sec HGT', offline=True)\n        assert len(files) == 1\n        files = dem_autoload([box], 'SRTM 3Sec')\n        assert len(files) == 1\n        with pytest.raises(RuntimeError):\n            files = dem_autoload([box], 'TDX90m')\n        with pytest.raises(RuntimeError):\n            dem_autoload([box], 'AW3D30', product='foobar')\n    with bbox({'xmin': -30, 'xmax': -29, 'ymin': 40, 'ymax': 41}, crs=4326) as box:\n        files = dem_autoload([box], 'SRTM 1Sec HGT')\n        assert len(files) == 0\n\n\ndef test_dem_create(tmpdir):\n    with bbox({'xmin': 11.5, 'xmax': 11.9, 'ymin': 51, 'ymax': 51.5}, crs=4326) as box:\n        with pytest.raises(RuntimeError):\n            files = dem_autoload([box], 'foobar')\n        vrt = '/vsimem/test.vrt'\n        dem_autoload([box], 'SRTM 3Sec', vrt=vrt)\n    out = os.path.join(str(tmpdir), 'srtm.tif')\n    dem_create(src=vrt, dst=out, t_srs=32632, tr=(90, 90), nodata=-32767)\n    assert os.path.isfile(out)\n\n\ndef test_intrange():\n    ext = {'xmin': 11, 'xmax': 12,\n           'ymin': 51, 'ymax': 51.5}\n    with bbox(ext, 4326) as box:\n        with DEMHandler([box]) as dem:\n            ref1 = range(51, 52), range(11, 12)\n            ref5 = range(50, 55, 5), range(10, 15, 5)\n            ref15 = range(45, 60, 15), range(0, 15, 15)\n            assert dem.intrange(box.extent, 1) == ref1\n            assert dem.intrange(box.extent, 5) == ref5\n            assert dem.intrange(box.extent, 15) == ref15\n"
  },
  {
    "path": "tests/test_config.py",
    "content": "from pyroSAR.config import ConfigHandler\nimport os\nimport pytest\n\n\nclass TestConfigHandler:\n    \n    def test_make_dir_and_config(self, tmpdir):\n        conf = ConfigHandler()\n        \n        path_pyrosar = os.path.exists(conf._ConfigHandler__GLOBAL['path'])\n        path_config = os.path.isfile(conf._ConfigHandler__GLOBAL['config'])\n        \n        assert path_pyrosar is True\n        assert path_config is True\n    \n    def test_add_section(self):\n        conf = ConfigHandler()\n        conf.add_section('FOO')\n        \n        assert 'FOO' in conf.sections\n    \n    def test_options(self, tmpdir):\n        conf = ConfigHandler()\n        conf.set('FOO', 'bar', 'foobar')\n        \n        # cannot set attribute for section that does not exist\n        with pytest.raises(AttributeError):\n            conf.set('SNAPp', 'etc', 'temp/dir')\n        \n        assert conf['FOO']['bar'] == 'foobar'\n        assert conf['FOO'] == {'bar': 'foobar'}\n    \n    def test_overwrite(self, tmpdir):\n        conf = ConfigHandler()\n        \n        with pytest.raises(RuntimeError):\n            conf.set('FOO', 'bar', 'loremipsum')\n        \n        conf.set('FOO', 'bar', 'loremipsum', overwrite=True)\n        assert conf['FOO']['bar'] == 'loremipsum'\n    \n    def test_remove(self, tmpdir):\n        conf = ConfigHandler()\n        \n        with pytest.raises(AttributeError):\n            conf.remove_option('SNAP', 'kex')\n        \n        with pytest.raises(AttributeError):\n            conf.remove_option('SNApP', 'etc')\n        \n        conf.remove_option('FOO', 'bar')\n        assert list(conf['FOO'].keys()) == []\n        \n        conf.remove_section('FOO')\n"
  },
  {
    "path": "tests/test_drivers.py",
    "content": "import pyroSAR\n\nimport pytest\nimport platform\nimport tarfile as tf\nimport os\nfrom datetime import datetime\nfrom spatialist import Vector\nfrom sqlalchemy import Table, MetaData, Column, Integer, String\nfrom geoalchemy2 import Geometry\n\nmetadata = MetaData()\n\nmytable = Table('mytable', metadata,\n                Column('mytable_id', Integer, primary_key=True),\n                Column('value', String(50)),\n                Column('shape', Geometry('POLYGON', management=True, srid=4326)))\n\n\n@pytest.fixture()\ndef testcases():\n    cases = {\n        's1': {\n            'acquisition_mode': 'IW',\n            'bbox_area': 7.573045244595988,\n            'compression': 'zip',\n            'corners': {'ymax': 52.183979, 'ymin': 50.295261, 'xmin': 8.017178, 'xmax': 12.0268},\n            'hgt_len': 15,\n            'lines': 16685,\n            'orbit': 'A',\n            'outname': 'S1A__IW___A_20150222T170750',\n            'polarizations': ['VV', 'VH'],\n            'product': 'GRD',\n            'samples': 25368,\n            'sensor': 'S1A',\n            'spacing': (10.0, 9.998647),\n            'start': '20150222T170750',\n            'stop': '20150222T170815'\n        },\n        'psr2': {\n            'acquisition_mode': 'FBD',\n            'compression': 'zip',\n            'corners': {'xmin': -62.9005207, 'xmax': -62.1629744, 'ymin': -11.4233051, 'ymax': -10.6783401},\n            'hgt_len': 2,\n            'lines': 13160,\n            'orbit': 'A',\n            'outname': 'PSR2_FBD__A_20140909T043342',\n            'polarizations': ['HH', 'HV'],\n            'product': '1.5',\n            'samples': 12870,\n            'sensor': 'PSR2',\n            'spacing': (6.25, 6.25),\n            'start': '20140909T043342',\n            'stop': '20140909T043352'\n        }\n    }\n    return cases\n\n\n@pytest.fixture\ndef scene(testcases, testdata, request):\n    case = testcases[request.param]\n    case['pyro'] = 
pyroSAR.identify(testdata[request.param])\n    return case\n\n\nclass Test_Metadata():\n    @pytest.mark.parametrize('scene', ['s1', 'psr2'], indirect=True)\n    def test_attributes(self, scene):\n        assert scene['pyro'].acquisition_mode == scene['acquisition_mode']\n        assert scene['pyro'].compression == scene['compression']\n        assert scene['pyro'].getCorners() == scene['corners']\n        assert scene['pyro'].lines == scene['lines']\n        assert scene['pyro'].outname_base() == scene['outname']\n        assert scene['pyro'].orbit == scene['orbit']\n        assert scene['pyro'].polarizations == scene['polarizations']\n        assert scene['pyro'].product == scene['product']\n        assert scene['pyro'].samples == scene['samples']\n        assert scene['pyro'].start == scene['start']\n        assert scene['pyro'].stop == scene['stop']\n        assert scene['pyro'].sensor == scene['sensor']\n        assert scene['pyro'].spacing == scene['spacing']\n        assert len(scene['pyro'].getHGT()) == scene['hgt_len']\n\n\ndef test_identify_fail(testdir, testdata):\n    with pytest.raises(OSError):\n        pyroSAR.identify(os.path.join(testdir, 'foobar'))\n    with pytest.raises(RuntimeError):\n        pyroSAR.identify(testdata['tif'])\n\n\ndef test_identify_many_fail(testdata):\n    assert pyroSAR.identify_many([testdata['tif']]) == []\n\n\ndef test_filter_processed(tmpdir, testdata):\n    scene = pyroSAR.identify(testdata['s1'])\n    assert len(pyroSAR.filter_processed([scene], str(tmpdir))) == 1\n\n\ndef test_parse_date():\n    with pytest.raises(ValueError):\n        print(pyroSAR.parse_date(1))\n    with pytest.raises(ValueError):\n        print(pyroSAR.parse_date('foobar'))\n    assert pyroSAR.parse_date(datetime(2006, 11, 21)) == '20061121T000000'\n\n\ndef test_export2dict():\n    pass\n\n\ndef test_getFileObj(tmpdir, testdata):\n    scene = pyroSAR.identify(testdata['s1'])\n    if platform.system() == 'Windows':\n        directory = 
u'\\\\\\\\?\\\\' + str(tmpdir)\n    else:\n        directory = str(tmpdir)\n    scene.unpack(directory)\n    scene = pyroSAR.identify(scene.scene)\n    item = scene.findfiles('manifest.safe')[0]\n    assert os.path.basename(item) == 'manifest.safe'\n    assert isinstance(scene.getFileObj(item).read(), (bytes, str))\n    \n    filename = os.path.join(str(tmpdir), os.path.basename(testdata['s1'].replace('zip', 'tar.gz')))\n    with tf.open(filename, 'w:gz') as tar:\n        tar.add(scene.scene, arcname=os.path.basename(scene.scene))\n    # test error if scene is not a directory, zip or tar\n    with pytest.raises(RuntimeError):\n        pyroSAR.getFileObj(scene=os.path.join(scene.scene, 'manifest.safe'), filename='bar')\n    scene = pyroSAR.identify(filename)\n    assert scene.compression == 'tar'\n    item = scene.findfiles('manifest.safe')[0]\n    assert isinstance(scene.getFileObj(item).read(), (bytes, str))\n    with pytest.raises(RuntimeError):\n        pyroSAR.getFileObj('foo', 'bar')\n\n\ndef test_scene(tmpdir, testdata):\n    id = pyroSAR.identify(testdata['s1'])\n    assert isinstance(id.export2dict(), dict)\n    with pytest.raises(RuntimeError):\n        assert isinstance(id.gdalinfo(), dict)\n    id.summary()\n    id.bbox(outname=os.path.join(str(tmpdir), 'bbox_test.shp'), overwrite=True)\n    assert id.is_processed(str(tmpdir)) is False\n    id.unpack(str(tmpdir), overwrite=True)\n    assert id.compression is None\n    with pytest.raises(RuntimeError):\n        id.getGammaImages()\n    assert id.getGammaImages(id.scene) == []\n    id = pyroSAR.identify(testdata['psr2'])\n    assert id.getCorners() == {'xmax': -62.1629744, 'xmin': -62.9005207,\n                               'ymax': -10.6783401, 'ymin': -11.4233051}\n\n\ndatasets = ['asar', 'ers1_esa', 'ers1_ceos', 'psr2', 's1']\n\n\n@pytest.mark.parametrize('dataset', datasets)\ndef test_geometry(testdata, dataset):\n    scene = pyroSAR.identify(testdata[dataset])\n    with scene.geometry() as geom:\n     
   assert isinstance(geom, Vector)\n\n\ndef test_geo_grid(tmpdir, testdata):\n    scene = pyroSAR.identify(testdata['s1'])\n    with scene.geo_grid() as geom:\n        assert isinstance(geom, Vector)\n    out = tmpdir / \"geogrid.gpkg\"\n    scene.geo_grid(outname=str(out))\n    assert out.exists()\n"
  },
  {
    "path": "tests/test_examine.py",
    "content": "import os\nimport pytest\nfrom pyroSAR.examine import ExamineSnap, SnapProperties\n\n\ndef test_snap_config(tmpdir):\n    conf_snap = ExamineSnap()\n    conf = SnapProperties(path=os.path.dirname(conf_snap.etc))\n    path = os.path.join(os.path.expanduser('~'), '.snap', 'etc', 'snap.properties')\n    assert conf.userpath_properties == path\n    conf.userpath = str(tmpdir)\n    assert conf.userpath == tmpdir\n    with pytest.raises(TypeError):\n        conf.userpath = tmpdir\n    with pytest.raises(KeyError):\n        conf['foobar'] = str(tmpdir)\n    ###########################################################################\n    # check that the type is preserved when setting values\n    conf['snap.jai.tileCacheSize'] = 2048\n    assert conf['snap.jai.tileCacheSize'] == 2048\n    assert isinstance(conf['snap.jai.tileCacheSize'], int)\n    \n    conf = SnapProperties(path=os.path.dirname(conf_snap.etc))\n    assert conf['snap.jai.tileCacheSize'] == 2048\n    assert isinstance(conf['snap.jai.tileCacheSize'], int)\n    \n    conf['snap.jai.tileCacheSize'] = 2048.\n    assert isinstance(conf['snap.jai.tileCacheSize'], float)\n    \n    conf = SnapProperties(path=os.path.dirname(conf_snap.etc))\n    assert conf['snap.jai.tileCacheSize'] == 2048.\n    assert isinstance(conf['snap.jai.tileCacheSize'], float)\n    \n    conf['snap.jai.tileCacheSize'] = None\n    assert conf['snap.jai.tileCacheSize'] is None\n    \n    conf = SnapProperties(path=os.path.dirname(conf_snap.etc))\n    assert conf['snap.jai.tileCacheSize'] is None\n    \n    conf['snap.jai.tileCacheSize'] = True\n    assert conf['snap.jai.tileCacheSize'] is True\n    \n    conf = SnapProperties(path=os.path.dirname(conf_snap.etc))\n    assert conf['snap.jai.tileCacheSize'] is True\n    ###########################################################################\n    # check that a path can correctly be written and read\n    conf = SnapProperties(path=os.path.dirname(conf_snap.etc))\n    
conf['snap.userdir'] = str(tmpdir / '.snap')\n    \n    conf = SnapProperties(path=os.path.dirname(conf_snap.etc))\n    assert conf['snap.userdir'] == str(tmpdir / '.snap')\n"
  },
  {
    "path": "tests/test_gamma.py",
    "content": "import os\nimport pytest\nfrom pyroSAR.gamma import ISPPar, par2hdr, Namespace, slc_corners, api\n\n\ndef test_par(testdata, tmpdir):\n    with ISPPar(testdata['dempar']) as par:\n        envi = par.envidict()\n        assert envi['map_info'] == ['UTM', '1.0000', '1.0000', 515353.565, 5235168.873, '20.0', '20.0',\n                                    32, 'North', 'WGS-84', 'units=Meters']\n        assert envi['lines'] == 6455\n        assert envi['samples'] == 5927\n        assert envi['interleave'] == 'bsq'\n        assert envi['bands'] == 1\n        assert envi['byte_order'] == 1\n        assert envi['data_type'] == 4\n        assert envi['file_type'] == 'ENVI Standard'\n        hdrfile = os.path.join(str(tmpdir), 'dem.hdr')\n    par2hdr(testdata['dempar'], hdrfile=hdrfile, modifications={'band_names': ['band1']}, nodata=0)\n    assert os.path.isfile(hdrfile)\n    with ISPPar(testdata['mlipar']) as par:\n        ref = '2014-11-15T18:18:01.309050'\n        assert par.date == ref\n        assert par.envidict()['acquisition_time'] == ref + 'Z'\n        print(par)\n\n\ndef test_namespace():\n    n = Namespace(directory='/test', basename='S1A__IW___A_20180829T170656')\n    n.appreciate(['inc_geo', 'ls_map'])\n    assert n.isregistered('inc_geo')\n    assert n.isfile('inc_geo') is False\n    assert n.isappreciated('inc_geo') is True\n    exp1 = os.path.join('/test', 'S1A__IW___A_20180829T170656_inc_geo')\n    exp2 = os.path.join('/test', 'S1A__IW___A_20180829T170656_ls_map')\n    assert n['inc_geo'] == exp1\n    assert n.get('ls_map') == exp2\n    n.depreciate(['inc_geo'])\n    assert n.isappreciated('inc_geo') is False\n    assert n['inc_geo'] == '-'\n    assert n.getall() == {'inc_geo': '-', 'ls_map': exp2}\n    assert n.select(['inc_geo', 'ls_map']) == ['-', exp2]\n    n.depreciate(['dem_seg'])\n    assert n['dem_seg'] == '-'\n\n\n@pytest.mark.skipif('isp' not in dir(api), reason='requires GAMMA installation with module ISP')\ndef 
test_slc_corners(testdata):\n    print(testdata['dempar'])\n    pts = slc_corners(testdata['mlipar'])\n    assert pts == {'ymin': 36.20859758,\n                   'ymax': 38.11058293,\n                   'xmin': -6.59346425,\n                   'xmax': -3.42811204}\n"
  },
  {
    "path": "tests/test_gamma_args.py",
    "content": "import pytest\nfrom pyroSAR.ancillary import getargs\nfrom pyroSAR.gamma import api\n\n\n@pytest.mark.skipif('diff' not in dir(api), reason='requires GAMMA installation with module DIFF')\ndef test_args_diff():\n    from pyroSAR.gamma.api import diff\n    lookup = {\n        'gc_map': ['DEM', 'DEM_par', 'DEM_seg', 'DEM_seg_par', 'MLI_par',\n                   'OFF_par', 'frame', 'inc', 'lat_ovr', 'logpath', 'lon_ovr',\n                   'lookup_table', 'ls_map', 'ls_mode', 'outdir', 'pix',\n                   'psi', 'r_ovr', 'shellscript', 'sim_sar', 'u', 'v'],\n        'gc_map_grd': ['DEM', 'DEM_par', 'DEM_seg', 'DEM_seg_par', 'GRD_par',\n                       'frame', 'inc', 'lat_ovr', 'logpath', 'lon_ovr',\n                       'lookup_table', 'ls_map', 'ls_mode', 'outdir', 'pix',\n                       'psi', 'r_ovr', 'shellscript', 'sim_sar', 'u', 'v'],\n        'geocode_back': ['data_in', 'data_out', 'dtype', 'interp_mode',\n                         'logpath', 'lookup_table', 'lr_in', 'lr_out',\n                         'nlines_out', 'order', 'outdir', 'shellscript',\n                         'width_in', 'width_out'],\n        'par_EORC_PALSAR_geo': ['CEOS_data', 'CEOS_leader', 'DEM_par', 'MLI',\n                                'MLI_par', 'cal', 'logpath', 'outdir',\n                                'shellscript'],\n        'par_TX_geo': ['DEM_par', 'GEO', 'GeoTIFF', 'MLI_par', 'annotation_XML',\n                       'logpath', 'outdir', 'pol', 'shellscript'],\n        'pixel_area': ['DEM', 'DEM_par', 'MLI_par', 'area_fact', 'inc_map',\n                       'logpath', 'lookup_table', 'ls_map', 'nstep', 'outdir',\n                       'pix_gamma0', 'pix_sigma0', 'shellscript'],\n    }\n    for command, args in lookup.items():\n        assert set(args).issubset(getargs(getattr(diff, command)))\n\n\n@pytest.mark.skipif('disp' not in dir(api), reason='requires GAMMA installation with module DISP')\ndef test_args_disp():\n    from 
pyroSAR.gamma.api import disp\n    lookup = {\n        'data2geotiff': ['DEM_par', 'GeoTIFF', 'data', 'logpath', 'no_data',\n                         'outdir', 'shellscript', 'type']\n    }\n    for command, args in lookup.items():\n        assert set(args).issubset(getargs(getattr(disp, command)))\n\n\n@pytest.mark.skipif('isp' not in dir(api), reason='requires GAMMA installation with module ISP')\ndef test_args_isp():\n    from pyroSAR.gamma.api import isp\n    lookup = {\n        'MLI_cat': ['degree', 'dtype', 'extrapol', 'interp_mode', 'mflg',\n                    'MLI1', 'MLI1_par', 'MLI2', 'MLI2_par', 'MLI3', 'MLI3_par',\n                    'overlap'],\n        'multi_look': ['MLI', 'MLI_par', 'SLC', 'SLC_par', 'azlks', 'exp',\n                       'loff', 'nlines', 'rlks', 'scale'],\n        'multi_look_MLI': ['MLI_in', 'MLI_in_par', 'MLI_out', 'MLI_out_par',\n                           'azlks', 'loff', 'nlines', 'rlks', 'scale'],\n        'par_ASAR': ['ASAR_ERS_file', 'K_dB', 'output_name'],\n        'par_EORC_PALSAR': ['CEOS_data', 'CEOS_leader', 'SLC', 'SLC_par', 'dtype',\n                            'sc_dB'],\n        'par_ESA_ERS': ['CEOS_DAT', 'CEOS_SAR_leader', 'SLC', 'SLC_par', 'inlist'],\n        'par_S1_GRD': ['GRD', 'GRD_par', 'GeoTIFF', 'MLI', 'MLI_par', 'annotation_XML',\n                       'calibration_XML', 'eflg', 'noise_XML', 'noise_pwr', 'rps'],\n        'par_S1_SLC': ['GeoTIFF', 'SLC', 'SLC_par', 'TOPS_par', 'annotation_XML',\n                       'calibration_XML', 'dtype', 'noise_XML',\n                       'noise_pwr', 'sc_dB'],\n        'par_TX_GRD': ['GRD', 'GRD_par', 'GeoTIFF', 'annotation_XML', 'pol'],\n        'par_TX_SLC': ['COSAR', 'SLC', 'SLC_par', 'annotation_XML', 'dtype', 'pol'],\n        'radcal_MLI': ['CMLI', 'K_dB', 'MLI', 'MLI_par', 'OFF_par', 'ant_flag',\n                       'antenna', 'pix_area', 'refarea_flag',\n                       'rloss_flag', 'sc_dB'],\n        'radcal_PRI': ['GRD', 'GRD_par', 
'K_dB', 'PRI', 'PRI_par',\n                       'inc_ref', 'loff', 'nl', 'nr', 'roff'],\n        'radcal_SLC': ['CSLC', 'CSLC_par', 'K_dB', 'SLC', 'SLC_par',\n                       'ant_flag', 'antenna', 'fcase',\n                       'pix_area', 'refarea_flag', 'rloss_flag', 'sc_dB'],\n        'S1_OPOD_vec': ['OPOD', 'SLC_par', 'nstate'],\n        'SLC_deramp_ScanSAR': ['SLC1_tab', 'SLC2_tab', 'mode', 'phflg'],\n        'SLC_mosaic_ScanSAR': ['SLC', 'SLCR_tab', 'SLC_par', 'SLC_tab',\n                               'azlks', 'rlks', 'bflg']\n    }\n    for command, args in lookup.items():\n        default = ['logpath', 'outdir', 'shellscript']\n        assert set(args + default).issubset(getargs(getattr(isp, command)))\n\n\n@pytest.mark.skipif('lat' not in dir(api), reason='requires GAMMA installation with module LAT')\ndef test_args_lat():\n    from pyroSAR.gamma.api import lat\n    lookup = {\n        'linear_to_dB': ['data_in', 'data_out', 'inverse_flag', 'logpath', 'null_value', 'outdir',\n                         'shellscript', 'width'],\n        'product': ['bx', 'by', 'data_1', 'data_2', 'logpath', 'outdir', 'product',\n                    'shellscript', 'wgt_flag', 'width'],\n        'ratio': ['bx', 'by', 'd1', 'd2', 'logpath', 'outdir', 'ratio',\n                  'shellscript', 'wgt_flag', 'width'],\n        'sigma2gamma': ['gamma0', 'inc', 'logpath', 'outdir', 'sigma0', 'shellscript', 'width']\n    }\n    for command, args in lookup.items():\n        assert set(args).issubset(getargs(getattr(lat, command)))\n"
  },
  {
    "path": "tests/test_license.py",
    "content": "import os\nimport re\nfrom datetime import datetime\n\ndef test_license_year():\n    dir_current = os.path.dirname(os.path.abspath(__file__))\n    license = os.path.join(dir_current, '..', 'LICENSE.txt')\n    assert os.path.isfile(license)\n    with open(license, 'r') as f:\n        content = f.read()\n        start, end = re.search('([0-9]{4})-([0-9]{4})', content).groups()\n    year_current = datetime.now().year\n    assert int(start) == 2014\n    assert int(end) == year_current\n"
  },
  {
    "path": "tests/test_osv.py",
    "content": "import os\nimport time\nimport pytest\nfrom pyroSAR import identify\nfrom pyroSAR.S1 import OSV\nfrom datetime import datetime, timedelta\n\n\ndef test_osv_cleanres(tmpdir):\n    with OSV(str(tmpdir)) as osv:\n        assert osv.getLocals('POE') == []\n        assert osv.getLocals('RES') == []\n        now = (datetime.now() - timedelta(hours=10)).strftime('%Y%m%dT%H%M%S')\n        res = osv.catch(sensor='S1A', osvtype='RES', start=now)\n        nfiles = len(res)\n        osv.retrieve(res)\n        osv.clean_res()\n        assert len(osv.getLocals('RES')) == nfiles\n\n\ndef test_scene_osv(tmpdir, testdata):\n    id = identify(testdata['s1_orbit'])\n    osvdir = os.path.join(str(tmpdir), 'osv')\n    id.getOSV(osvdir)\n    with OSV(osvdir) as osv:\n        with pytest.raises(RuntimeError):\n            osv.catch(sensor='S1A', osvtype='XYZ')\n        res = osv.catch(sensor='S1A', osvtype='RES', start=osv.mindate('POE'), stop=osv.maxdate('POE'))\n        assert len(res) == 0\n        \n        assert len(osv.getLocals('POE')) == 1\n        assert len(osv.getLocals('RES')) == 0\n        assert osv.match(sensor=id.sensor, timestamp=id.start, osvtype='POE') is not None\n        assert osv.match(sensor=id.sensor, timestamp=id.start, osvtype=['POE', 'RES']) is not None\n        assert osv.match(sensor=id.sensor, timestamp=id.start, osvtype='RES') is None\n        for item in osv.getLocals('POE')[1:3]:\n            os.remove(item)\n        assert len(osv.getLocals('POE')) == 1\n        res = osv.catch(sensor='S1A', osvtype='RES', start='20210201T000000', stop='20210201T150000', url_option=1)\n        assert len(res) == 11\n        osv.retrieve(res[0:3])\n        assert len(osv.getLocals('RES')) == 3\n        # check retrieving files for the current day (e.g. 
to ensure that search is not extended to the future)\n        poe = osv.catch(sensor='S1A', osvtype='POE', start=time.strftime('%Y%m%dT%H%M%S'))\n        assert len(poe) == 0\n        # check retrieving files whose start is in the previous month of the search start\n        poe = osv.catch(sensor='S1A', osvtype='POE', start='20220201T163644', stop='20220201T163709')\n        assert len(poe) == 1\n"
  },
  {
    "path": "tests/test_snap.py",
    "content": "#####################################################################\n# Module for testing the functionality of the SNAP processing module\n#####################################################################\nimport os\nimport pytest\nfrom pyroSAR import identify\nfrom pyroSAR.snap import geocode\nfrom spatialist import bbox\nfrom spatialist.ancillary import finder\nfrom pyroSAR.snap.auxil import is_consistent, split, groupbyWorkers, Workflow, parse_recipe\nfrom pyroSAR.examine import ExamineSnap\n\n\ndef test_installation():\n    reg = ExamineSnap()\n    assert os.path.isfile(reg.gpt)\n    for module in ['core', 'desktop', 'rstb', 'opttbx', 'microwavetbx']:\n        version = reg.get_version(module=module)\n        assert isinstance(version, str)\n\n\ndef test_consistency():\n    with parse_recipe('base') as wf:\n        assert is_consistent(wf)\n\n\ndef test_geocode(tmpdir, testdata):\n    scene = testdata['s1']\n    geocode(scene, str(tmpdir), test=True)\n    xmlfile = finder(str(tmpdir), ['*.xml'])[0]\n    tree = Workflow(xmlfile)\n    assert is_consistent(tree) is True\n    groups = groupbyWorkers(xmlfile, 2)\n    assert len(groups) == 4\n    groups2 = groupbyWorkers(xmlfile, 100)\n    assert len(groups2) == 1\n    split(xmlfile, groups)\n    id = identify(scene)\n    basename = '{}_{}'.format(id.outname_base(), tree.suffix())\n    procdir = os.path.join(str(tmpdir), basename)\n    assert os.path.isdir(procdir)\n    tempdir = os.path.join(procdir, 'tmp')\n    assert os.path.isdir(tempdir)\n    parts = finder(tempdir, ['*.xml'])\n    assert len(parts) == 4\n\n\nclass Test_geocode_opts():\n    def test_infile_type(self, tmpdir, testdata):\n        scene = testdata['s1']\n        with pytest.raises(TypeError):\n            geocode(infile=123, outdir=str(tmpdir), test=True)\n        id = identify(scene)\n        geocode(infile=id, outdir=str(tmpdir), test=True)\n    \n    def test_pol(self, tmpdir, testdata):\n        scene = testdata['s1']\n    
    with pytest.raises(RuntimeError):\n            geocode(scene, str(tmpdir), polarizations=1, test=True)\n        with pytest.raises(RuntimeError):\n            geocode(scene, str(tmpdir), polarizations='foobar', test=True)\n        geocode(scene, str(tmpdir), polarizations='VV', test=True)\n    \n    def test_pol_list(self, tmpdir, testdata):\n        scene = testdata['s1']\n        geocode(scene, str(tmpdir), polarizations=['VV', 'VH'], test=True)\n    \n    def test_geotype(self, tmpdir, testdata):\n        scene = testdata['s1']\n        with pytest.raises(RuntimeError):\n            geocode(scene, str(tmpdir), geocoding_type='foobar', test=True)\n        geocode(scene, str(tmpdir), test=True,\n                geocoding_type='SAR simulation cross correlation')\n    \n    def test_srs(self, tmpdir, testdata):\n        scene = testdata['s1']\n        with pytest.raises(RuntimeError):\n            geocode(scene, str(tmpdir), t_srs='foobar', test=True)\n        geocode(scene, str(tmpdir), t_srs=32632, test=True)\n    \n    def test_scaling(self, tmpdir, testdata):\n        scene = testdata['s1']\n        with pytest.raises(RuntimeError):\n            geocode(scene, str(tmpdir), scaling='foobar', test=True)\n    \n    def test_shp(self, tmpdir, testdata):\n        scene = testdata['s1']\n        ext = {'xmin': 12, 'xmax': 13, 'ymin': 53, 'ymax': 54}\n        with bbox(ext, 4326) as new:\n            with pytest.raises(RuntimeError):\n                geocode(scene, str(tmpdir), shapefile=new, test=True)\n        \n        with identify(scene).bbox() as box:\n            ext = box.extent\n        ext['xmax'] -= 1\n        with bbox(ext, 4326) as new:\n            geocode(scene, str(tmpdir), shapefile=new, test=True)\n    \n    def test_offset(self, tmpdir, testdata):\n        scene = testdata['s1']\n        geocode(scene, str(tmpdir), offset=(100, 100, 0, 0), test=True)\n    \n    def test_export_extra(self, tmpdir, testdata):\n        scene = testdata['s1']\n       
 with pytest.raises(RuntimeError):\n            geocode(scene, str(tmpdir), test=True,\n                    export_extra=['foobar'])\n        geocode(scene, str(tmpdir), test=True,\n                export_extra=['localIncidenceAngle'])\n    \n    def test_externalDEM(self, tmpdir, testdata):\n        scene = testdata['s1']\n        dem_dummy = testdata['tif']\n        with pytest.raises(RuntimeError):\n            geocode(scene, str(tmpdir), externalDEMFile='foobar', test=True)\n        geocode(scene, str(tmpdir), externalDEMFile=dem_dummy, test=True)\n\n    def test_speckleFilter(self, tmpdir, testdata):\n        scene = testdata['s1']\n        with pytest.raises(ValueError):\n            geocode(scene, str(tmpdir), speckleFilter='foobar', test=True)\n        geocode(scene, str(tmpdir), speckleFilter='Refined Lee', test=True)\n    \n    def test_refarea(self, tmpdir, testdata):\n        scene = testdata['s1']\n        with pytest.raises(ValueError):\n            geocode(scene, str(tmpdir), terrainFlattening=False, refarea='foobar', test=True)\n        geocode(scene, str(tmpdir), terrainFlattening=True, refarea='gamma0', test=True)\n    \n    def test_sliceassembly(self, tmpdir, testdata):\n        scene1 = testdata['s1']\n        scene2 = testdata['s1_2']\n        wf = geocode([scene1, scene2], str(tmpdir), test=True, returnWF=True)\n        for n in range(1, 4):\n            groups = groupbyWorkers(wf, n=n)\n            split(wf, groups)\n"
  },
  {
    "path": "tests/test_snap_exe.py",
    "content": "from contextlib import contextmanager\n\n\n@contextmanager\ndef not_raises(ExpectedException):\n    try:\n        yield\n\n    except ExpectedException:\n        raise AssertionError(\n            \"Did raise exception {0} when it should not!\".format(\n                repr(ExpectedException)\n            )\n        )\n\n    except Exception:\n        raise AssertionError(\n            \"An unexpected exception {0} raised.\".format(repr(Exception))\n        )\n"
  },
  {
    "path": "tests/test_xml_util.py",
    "content": "\nimport os\nimport pytest\nfrom pyroSAR import SAFE\nfrom pyroSAR.xml_util import XMLHandler\n\n\ndef test_handler(tmpdir, testdata):\n    id = SAFE(testdata['s1'])\n    id.unpack(str(tmpdir))\n    testfile = os.path.join(id.scene, 'manifest.safe')\n    xml = XMLHandler(testfile)\n    xml.restoreNamespaces()\n    xml.write(os.path.join(str(tmpdir), 'test.xml'), 'w')\n    with pytest.raises(RuntimeError):\n        xml = XMLHandler(1)\n    with pytest.raises(RuntimeError):\n        xml = XMLHandler('foobar')\n    with open(testfile, 'r') as infile:\n        xml = XMLHandler(infile)\n"
  }
]