Repository: robotology/gym-ignition
Branch: master
Commit: 4bc8e0465eed
Files: 245
Total size: 1.2 MB
Directory structure:
gitextract_2yu_wlwv/
├── .clang-format
├── .docker/
│ ├── Dockerfile
│ ├── base.Dockerfile
│ ├── cicd-devel.Dockerfile
│ ├── cicd-master.Dockerfile
│ ├── docker-compose.yml
│ ├── entrypoint.sh
│ └── setup_virtualenv.sh
├── .github/
│ ├── CODEOWNERS
│ ├── ISSUE_TEMPLATE/
│ │ ├── bug_report.md
│ │ └── feature_request.md
│ ├── release.yml
│ └── workflows/
│ ├── cicd.yml
│ ├── docker.yml
│ └── style.yml
├── .gitignore
├── CMakeLists.txt
├── LICENSE
├── README.md
├── docs/
│ ├── CMakeLists.txt
│ ├── doxygen/
│ │ └── CMakeLists.txt
│ └── sphinx/
│ ├── CMakeLists.txt
│ ├── _templates/
│ │ ├── module.rst_t
│ │ ├── package.rst_t
│ │ ├── toc.rst_t
│ │ ├── versioning.html
│ │ └── versions.html
│ ├── apidoc/
│ │ ├── gym-ignition/
│ │ │ ├── gym_ignition.base.rst
│ │ │ ├── gym_ignition.context.gazebo.rst
│ │ │ ├── gym_ignition.context.rst
│ │ │ ├── gym_ignition.randomizers.model.rst
│ │ │ ├── gym_ignition.randomizers.physics.rst
│ │ │ ├── gym_ignition.randomizers.rst
│ │ │ ├── gym_ignition.rbd.idyntree.rst
│ │ │ ├── gym_ignition.rbd.rst
│ │ │ ├── gym_ignition.rst
│ │ │ ├── gym_ignition.runtimes.rst
│ │ │ ├── gym_ignition.scenario.rst
│ │ │ └── gym_ignition.utils.rst
│ │ ├── gym-ignition-environments/
│ │ │ ├── gym_ignition_environments.models.rst
│ │ │ ├── gym_ignition_environments.randomizers.rst
│ │ │ ├── gym_ignition_environments.rst
│ │ │ └── gym_ignition_environments.tasks.rst
│ │ └── scenario/
│ │ ├── scenario.bindings.rst
│ │ └── scenario.rst
│ ├── breathe/
│ │ ├── core.rst
│ │ └── gazebo.rst
│ ├── conf.py
│ ├── getting_started/
│ │ ├── gym-ignition.rst
│ │ ├── manipulation.rst
│ │ └── scenario.rst
│ ├── index.html
│ ├── index.rst
│ ├── info/
│ │ ├── faq.rst
│ │ └── limitations.rst
│ ├── installation/
│ │ ├── developer.rst
│ │ ├── nightly.rst
│ │ ├── stable.rst
│ │ ├── support_policy.rst
│ │ ├── system_configuration.rst
│ │ └── virtual_environment.rst
│ ├── what/
│ │ ├── what_is_gym_ignition.rst
│ │ └── what_is_scenario.rst
│ └── why/
│ ├── motivations.rst
│ ├── why_gym_ignition.rst
│ ├── why_ignition_gazebo.rst
│ └── why_scenario.rst
├── examples/
│ ├── panda_pick_and_place.py
│ └── python/
│ └── launch_cartpole.py
├── pyproject.toml
├── python/
│ ├── gym_ignition/
│ │ ├── __init__.py
│ │ ├── base/
│ │ │ ├── __init__.py
│ │ │ ├── runtime.py
│ │ │ └── task.py
│ │ ├── context/
│ │ │ ├── __init__.py
│ │ │ └── gazebo/
│ │ │ ├── __init__.py
│ │ │ ├── controllers.py
│ │ │ └── plugin.py
│ │ ├── randomizers/
│ │ │ ├── __init__.py
│ │ │ ├── abc.py
│ │ │ ├── gazebo_env_randomizer.py
│ │ │ ├── model/
│ │ │ │ ├── __init__.py
│ │ │ │ └── sdf.py
│ │ │ └── physics/
│ │ │ ├── __init__.py
│ │ │ └── dart.py
│ │ ├── rbd/
│ │ │ ├── __init__.py
│ │ │ ├── conversions.py
│ │ │ ├── idyntree/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── helpers.py
│ │ │ │ ├── inverse_kinematics_nlp.py
│ │ │ │ ├── kindyncomputations.py
│ │ │ │ └── numpy.py
│ │ │ └── utils.py
│ │ ├── runtimes/
│ │ │ ├── __init__.py
│ │ │ ├── gazebo_runtime.py
│ │ │ └── realtime_runtime.py
│ │ ├── scenario/
│ │ │ ├── __init__.py
│ │ │ ├── model_with_file.py
│ │ │ └── model_wrapper.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ ├── logger.py
│ │ ├── math.py
│ │ ├── misc.py
│ │ ├── resource_finder.py
│ │ ├── scenario.py
│ │ └── typing.py
│ └── gym_ignition_environments/
│ ├── __init__.py
│ ├── models/
│ │ ├── __init__.py
│ │ ├── cartpole.py
│ │ ├── icub.py
│ │ ├── panda.py
│ │ └── pendulum.py
│ ├── randomizers/
│ │ ├── __init__.py
│ │ ├── cartpole.py
│ │ └── cartpole_no_rand.py
│ └── tasks/
│ ├── __init__.py
│ ├── cartpole_continuous_balancing.py
│ ├── cartpole_continuous_swingup.py
│ ├── cartpole_discrete_balancing.py
│ └── pendulum_swingup.py
├── scenario/
│ ├── CMakeLists.txt
│ ├── README.md
│ ├── bindings/
│ │ ├── CMakeLists.txt
│ │ ├── __init__.py
│ │ ├── core/
│ │ │ ├── CMakeLists.txt
│ │ │ └── core.i
│ │ └── gazebo/
│ │ ├── CMakeLists.txt
│ │ ├── gazebo.i
│ │ └── to_gazebo.i
│ ├── cmake/
│ │ ├── AddUninstallTarget.cmake
│ │ ├── AliasImportedTarget.cmake
│ │ ├── FindIgnitionDistribution.cmake
│ │ ├── FindPackageHandleStandardArgs.cmake
│ │ ├── FindPackageMessage.cmake
│ │ ├── FindPython/
│ │ │ └── Support.cmake
│ │ ├── FindPython3.cmake
│ │ ├── FindSphinx.cmake
│ │ ├── FindSphinxApidoc.cmake
│ │ ├── FindSphinxMultiVersion.cmake
│ │ ├── ImportTargetsCitadel.cmake
│ │ ├── ImportTargetsDome.cmake
│ │ ├── ImportTargetsEdifice.cmake
│ │ └── ImportTargetsFortress.cmake
│ ├── deps/
│ │ ├── CMakeLists.txt
│ │ └── clara/
│ │ └── clara.hpp
│ ├── pyproject.toml
│ ├── setup.cfg
│ ├── setup.py
│ └── src/
│ ├── CMakeLists.txt
│ ├── controllers/
│ │ ├── CMakeLists.txt
│ │ ├── include/
│ │ │ └── scenario/
│ │ │ └── controllers/
│ │ │ ├── ComputedTorqueFixedBase.h
│ │ │ ├── Controller.h
│ │ │ └── References.h
│ │ └── src/
│ │ └── ComputedTorqueFixedBase.cpp
│ ├── core/
│ │ ├── CMakeLists.txt
│ │ ├── include/
│ │ │ └── scenario/
│ │ │ └── core/
│ │ │ ├── Joint.h
│ │ │ ├── Link.h
│ │ │ ├── Model.h
│ │ │ ├── World.h
│ │ │ └── utils/
│ │ │ ├── Log.h
│ │ │ ├── signals.h
│ │ │ └── utils.h
│ │ └── src/
│ │ ├── signals.cpp
│ │ └── utils.cpp
│ ├── gazebo/
│ │ ├── CMakeLists.txt
│ │ ├── include/
│ │ │ └── scenario/
│ │ │ └── gazebo/
│ │ │ ├── GazeboEntity.h
│ │ │ ├── GazeboSimulator.h
│ │ │ ├── Joint.h
│ │ │ ├── Link.h
│ │ │ ├── Log.h
│ │ │ ├── Model.h
│ │ │ ├── World.h
│ │ │ ├── components/
│ │ │ │ ├── BasePoseTarget.h
│ │ │ │ ├── BaseWorldAccelerationTarget.h
│ │ │ │ ├── BaseWorldVelocityTarget.h
│ │ │ │ ├── ExternalWorldWrenchCmdWithDuration.h
│ │ │ │ ├── HistoryOfAppliedJointForces.h
│ │ │ │ ├── JointAcceleration.h
│ │ │ │ ├── JointAccelerationTarget.h
│ │ │ │ ├── JointControlMode.h
│ │ │ │ ├── JointController.h
│ │ │ │ ├── JointControllerPeriod.h
│ │ │ │ ├── JointPID.h
│ │ │ │ ├── JointPositionTarget.h
│ │ │ │ ├── JointVelocityTarget.h
│ │ │ │ ├── SimulatedTime.h
│ │ │ │ └── Timestamp.h
│ │ │ ├── exceptions.h
│ │ │ ├── helpers.h
│ │ │ └── utils.h
│ │ └── src/
│ │ ├── GazeboSimulator.cpp
│ │ ├── Joint.cpp
│ │ ├── Link.cpp
│ │ ├── Model.cpp
│ │ ├── World.cpp
│ │ ├── helpers.cpp
│ │ └── utils.cpp
│ └── plugins/
│ ├── CMakeLists.txt
│ ├── ControllerRunner/
│ │ ├── CMakeLists.txt
│ │ ├── ControllerRunner.cpp
│ │ ├── ControllerRunner.h
│ │ ├── ControllersFactory.cpp
│ │ └── ControllersFactory.h
│ ├── JointController/
│ │ ├── CMakeLists.txt
│ │ ├── JointController.cpp
│ │ └── JointController.h
│ └── Physics/
│ ├── CMakeLists.txt
│ ├── CanonicalLinkModelTracker.hh
│ ├── EntityFeatureMap.hh
│ ├── Physics.cc
│ └── Physics.hh
├── setup.cfg
├── setup.py
└── tests/
├── .python/
│ ├── __init__.py
│ ├── test_contacts.py
│ ├── test_environments_gazebowrapper.py
│ ├── test_externalforce.py
│ ├── test_joint_force.py
│ ├── test_pendulum_wrt_ground_truth.py
│ ├── test_rates.py
│ ├── test_robot_base.py
│ ├── test_robot_controller.py
│ ├── test_runtimes.py
│ └── utils.py
├── __init__.py
├── assets/
│ └── worlds/
│ └── fuel_support.sdf
├── common/
│ ├── __init__.py
│ └── utils.py
├── test_gym_ignition/
│ ├── __init__.py
│ ├── test_inverse_kinematics.py
│ ├── test_normalization.py
│ ├── test_reproducibility.py
│ └── test_sdf_randomizer.py
└── test_scenario/
├── __init__.py
├── test_contacts.py
├── test_custom_controllers.py
├── test_external_wrench.py
├── test_gazebo_simulator.py
├── test_ignition_fuel.py
├── test_link_velocities.py
├── test_model.py
├── test_multi_world.py
├── test_pid_controllers.py
├── test_velocity_direct.py
└── test_world.py
================================================
FILE CONTENTS
================================================
================================================
FILE: .clang-format
================================================
---
Language: Cpp
# BasedOnStyle: WebKit
AccessModifierOffset: -4
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlines: Left
AlignOperands: true
AlignTrailingComments: false
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: Inline
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: true
BinPackArguments: false
BinPackParameters: false
BraceWrapping:
AfterClass: true
AfterControlStatement: false
AfterEnum: true
AfterFunction: true
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: true
AfterUnion: false
BeforeCatch: true
BeforeElse: true
IndentBraces: false
SplitEmptyFunction: false
SplitEmptyRecord: false
SplitEmptyNamespace: false
BreakBeforeBinaryOperators: NonAssignment
BreakBeforeBraces: Custom
BreakBeforeInheritanceComma: true
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true
BreakConstructorInitializers: BeforeComma
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: true
ColumnLimit: 80
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: false
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
- foreach
- Q_FOREACH
- BOOST_FOREACH
IncludeBlocks: Preserve
IncludeCategories:
- Regex: '^"(llvm|llvm-c|clang|clang-c)/'
Priority: 2
- Regex: '^(<|"(gtest|gmock|isl|json)/)'
Priority: 3
- Regex: '.*'
Priority: 1
IncludeIsMainRegex: '(Test)?$'
IndentCaseLabels: true
IndentWidth: 4
IndentWrappedFunctionNames: false
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtTheStartOfBlocks: true
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: All
ObjCBlockIndentWidth: 4
ObjCSpaceAfterProperty: true
ObjCSpaceBeforeProtocolList: true
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Left
ReflowComments: true
SortIncludes: true
SortUsingDeclarations: true
SpaceAfterCStyleCast: true
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 1
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 4
UseTab: Never
...
================================================
FILE: .docker/Dockerfile
================================================
# Demo image: installs the released gym-ignition PyPI package on top of one of
# the CI images and clones the repository for interactive use.
ARG from=diegoferigo/gym-ignition:pypi-master
FROM ${from}

# Install the PyPI package in a virtualenv.
# pip_options allows e.g. "--pre" to select nightly pre-releases.
ARG pip_options=""
RUN virtualenv -p $(which python3) ${VIRTUAL_ENV} &&\
    python -m pip install --upgrade pip &&\
    pip install ${pip_options} gym-ignition &&\
    rm -r $HOME/.cache/pip

# Clone the repository (branch selectable at build time)
WORKDIR /github
ARG branch="master"
RUN git clone -b ${branch} https://github.com/robotology/gym-ignition /github

# Reset the entrypoint inherited from the base image.
# NOTE: the documented reset form is 'ENTRYPOINT []'. The previous
# 'ENTRYPOINT [""]' makes the runtime try to exec an empty-string binary
# before CMD, which fails with "executable file not found".
ENTRYPOINT []
CMD ["bash"]
================================================
FILE: .docker/base.Dockerfile
================================================
# Base image shared by all the gym-ignition CI/CD and demo images:
# Ubuntu + toolchain (gcc, clang-10, cmake, ninja, swig) + Python 3.8.
ARG from=ubuntu:focal
FROM ${from}

# Run the RUN instructions below with bash instead of the default /bin/sh
SHELL ["/bin/bash", "-c"]

# Setup locales and timezone (noninteractive avoids tzdata's prompt)
ARG TZ=Europe/Rome
ARG DEBIAN_FRONTEND=noninteractive
RUN rm -f /etc/localtime &&\
    ln -s /usr/share/zoneinfo/"${TZ}" /etc/localtime &&\
    apt-get update &&\
    apt-get install -y --no-install-recommends locales locales-all tzdata &&\
    rm -rf /var/lib/apt/lists/*

# Install tools and toolchain.
# Third-party apt repositories added:
#  - kitware:    recent CMake
#  - deadsnakes: extra CPython versions (python3.8 on focal is also in main,
#                the ppa keeps the image working across base releases)
#  - llvm:       clang-10
RUN apt-get update &&\
    apt-get install -y --no-install-recommends \
        wget \
        software-properties-common \
        apt-transport-https \
        apt-utils \
        gnupg2 \
        nano \
        rename \
        source-highlight \
        &&\
    wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | apt-key add - && \
    apt-add-repository "deb https://apt.kitware.com/ubuntu/ `lsb_release -cs` main" &&\
    add-apt-repository ppa:deadsnakes/ppa &&\
    wget -nv -O - http://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - &&\
    apt-add-repository -y "deb http://apt.llvm.org/`lsb_release -cs`/ llvm-toolchain-`lsb_release -cs`-10 main" &&\
    apt-get update &&\
    apt-get install -y --no-install-recommends \
        build-essential \
        clang-10 \
        git \
        cmake \
        cmake-curses-gui \
        ninja-build \
        valgrind \
        libgflags-dev \
        python3-pip \
        python3-wheel \
        python3.8 \
        python3.8-dev \
        libpython3.8-dev \
        virtualenv \
        swig \
        &&\
    rm -rf /var/lib/apt/lists/*

# Update git (required by actions/checkout)
RUN add-apt-repository ppa:git-core/ppa &&\
    apt-get update &&\
    apt-get install -y --no-install-recommends git &&\
    rm -rf /var/lib/apt/lists/*

# Common virtualenv properties: the venv location is consumed by the
# entrypoint/setup_virtualenv.sh scripts of the derived images, and its
# bin/ folder is prepended to PATH so the venv is active by default.
ENV VIRTUAL_ENV=/venv
ENV PATH=$VIRTUAL_ENV/bin:$PATH

CMD ["bash"]
================================================
FILE: .docker/cicd-devel.Dockerfile
================================================
# CI/CD image tracking the development line of Ignition: instead of installing
# binary ignition packages, it builds the whole collection from source in a
# colcon workspace (/workspace) using the gazebodistro vcs manifests.
ARG from=diegoferigo/gym-ignition:base
FROM ${from}

# vcstool imports the source repos, colcon builds the workspace
RUN pip3 install vcstool colcon-common-extensions &&\
    rm -r $HOME/.cache/pip

ARG CMAKE_BUILD_TYPE="Release"
ARG ignition_codename="fortress"
ARG IGNITION_DEFAULT_CHANNEL="stable"
# 1) Add the osrfoundation apt repo (for the collection's system deps).
# 2) Import the collection sources listed in the gazebodistro yaml.
# 3) Install the apt dependencies declared by each repo, excluding the ones
#    built from source here (dart, libignition*, libsdformat*).
# 4) Build the workspace with colcon/Ninja, then delete build artifacts
#    except CMakeCache.txt to keep the image small, and source the install
#    space in every bash shell.
RUN echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-${IGNITION_DEFAULT_CHANNEL} `lsb_release -cs` main" > \
        /etc/apt/sources.list.d/gazebo-${IGNITION_DEFAULT_CHANNEL}.list &&\
    wget http://packages.osrfoundation.org/gazebo.key -O - | apt-key add - &&\
    apt-get update &&\
    mkdir -p /workspace/src &&\
    cd /workspace/src &&\
    wget https://raw.githubusercontent.com/ignition-tooling/gazebodistro/master/collection-${ignition_codename}.yaml &&\
    vcs import < collection-${ignition_codename}.yaml &&\
    apt -y install --no-install-recommends \
        $(sort -u $(find . -iname 'packages-'$(lsb_release -cs)'.apt' -o -iname 'packages.apt') | grep -v -E "dart|^libignition|^libsdformat" | tr '\n' ' ') &&\
    rm -rf /var/lib/apt/lists/* &&\
    cd /workspace &&\
    colcon graph &&\
    colcon build \
        --cmake-args \
        -GNinja \
        -DBUILD_TESTING:BOOL=OFF \
        -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
        --merge-install \
        &&\
    find build/ -type f -not -name 'CMakeCache.txt' -delete &&\
    echo "source /workspace/install/setup.bash" >> /etc/bash.bashrc

# Source /etc/bash.bashrc before each command (interactive bash), so the
# colcon install space set up above is available to later RUN steps and CI.
SHELL ["/bin/bash", "-i", "-c"]

COPY entrypoint.sh /entrypoint.sh
COPY setup_virtualenv.sh /setup_virtualenv.sh
RUN chmod 755 /entrypoint.sh
RUN chmod 755 /setup_virtualenv.sh
ENTRYPOINT ["/entrypoint.sh"]
CMD ["bash"]
================================================
FILE: .docker/cicd-master.Dockerfile
================================================
# CI/CD image tracking the stable (master) line: installs a released
# ignition-<codename> metapackage from the osrfoundation apt repository.
ARG from=diegoferigo/gym-ignition:base
FROM ${from}

# Install ignition gazebo from the stable binary channel
ARG ignition_codename="fortress"
RUN echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-stable `lsb_release -cs` main" \
        > /etc/apt/sources.list.d/gazebo-stable.list &&\
    wget http://packages.osrfoundation.org/gazebo.key -O - | apt-key add - &&\
    apt-get update &&\
    apt-get install -y --no-install-recommends ignition-${ignition_codename} &&\
    rm -rf /var/lib/apt/lists/*

# Entrypoint creates the Python virtualenv before running the command
COPY entrypoint.sh /entrypoint.sh
COPY setup_virtualenv.sh /setup_virtualenv.sh
RUN chmod 755 /entrypoint.sh
RUN chmod 755 /setup_virtualenv.sh
ENTRYPOINT ["/entrypoint.sh"]
CMD ["bash"]
================================================
FILE: .docker/docker-compose.yml
================================================
version: '3.0'
services:
base:
build:
args:
from: ubuntu:focal
context: .
dockerfile: base.Dockerfile
image: diegoferigo/gym-ignition:base
# ======
# MASTER
# ======
ci-master:
build:
args:
from: diegoferigo/gym-ignition:base
ignition_codename: dome
context: .
dockerfile: cicd-master.Dockerfile
image: diegoferigo/gym-ignition:ci-master
pypi-master:
build:
args:
from: diegoferigo/gym-ignition:base
ignition_codename: dome
context: .
dockerfile: cicd-master.Dockerfile
image: diegoferigo/gym-ignition:pypi-master
# =====
# DEVEL
# =====
ci-devel:
build:
args:
from: diegoferigo/gym-ignition:base
ignition_codename: fortress
CMAKE_BUILD_TYPE: Debug
context: .
dockerfile: cicd-devel.Dockerfile
image: diegoferigo/gym-ignition:ci-devel
pypi-devel:
build:
args:
from: diegoferigo/gym-ignition:base
ignition_codename: fortress
CMAKE_BUILD_TYPE: Release
context: .
dockerfile: cicd-devel.Dockerfile
image: diegoferigo/gym-ignition:pypi-devel
# ====
# DEMO
# ====
latest:
build:
args:
from: diegoferigo/gym-ignition:pypi-master
branch: master
context: .
dockerfile: Dockerfile
image: diegoferigo/gym-ignition:latest
nightly:
build:
args:
from: diegoferigo/gym-ignition:ci-devel
branch: devel
pip_options: --pre
context: .
dockerfile: Dockerfile
image: diegoferigo/gym-ignition:nightly
================================================
FILE: .docker/entrypoint.sh
================================================
#!/bin/bash
# Container entrypoint: prepares the Python virtualenv and then executes the
# command given to 'docker run' (or the image CMD).
set -e

# The helper script is copied into the image by the Dockerfile; abort early
# if it is missing or not executable.
if [ ! -x "/setup_virtualenv.sh" ] ; then
    echo "File '/setup_virtualenv.sh' not found."
    exit 1
fi

# Setup the python virtualenv
bash /setup_virtualenv.sh

# If a CMD is passed, execute it
exec "$@"
================================================
FILE: .docker/setup_virtualenv.sh
================================================
#!/bin/bash
# Create the Python virtualenv used by the CI images and preinstall pytest.
#
# Environment:
#   PYTHON_VERSION  optional version suffix (e.g. "3.8"); defaults to "3"
#   VIRTUAL_ENV     destination folder of the virtualenv (set in base image)
set -eu

# Read the env variable if it exists, otherwise fall back to python3
PYTHON_EXE="python${PYTHON_VERSION:-3}"

# BUGFIX: the previous check '[ ! -x $(type -P ${PYTHON_EXE}) ]' never fired
# when the interpreter was missing: the command substitution expanded to
# nothing, leaving '[ ! -x ]', which is a single-argument string test and is
# always false. 'command -v' reports lookup failure through its exit status.
if ! command -v "${PYTHON_EXE}" > /dev/null 2>&1 ; then
    echo "Failed to find ${PYTHON_EXE} in PATH"
    exit 1
fi

# Create an empty virtualenv and enable it by default
virtualenv -p "${PYTHON_EXE}" "${VIRTUAL_ENV}"

# Install pytest in the virtual environment
"${VIRTUAL_ENV}/bin/pip3" install pytest
================================================
FILE: .github/CODEOWNERS
================================================
# Default owners
* @diegoferigo
================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: Bug report
about: Report a bug
labels: issue::type::bug
---
⚠️ If you're not sure whether your problem is a bug, please ask a question in [Discussions][Discussions] instead.
- [ ] I've read the [FAQ][FAQ]
- [ ] I've read the [Support Policy][Support Policy]
- [ ] I've already searched similar [issues][Issues] and [discussions][Discussions]
- [ ] I've already updated to the most recent supported [release][Releases] (either Stable or Nightly)
[Issues]: https://github.com/robotology/gym-ignition/issues
[Releases]: https://github.com/robotology/gym-ignition/releases
[FAQ]: https://robotology.github.io/gym-ignition/master/info/faq.html
[Discussions]: https://github.com/robotology/gym-ignition/discussions
[Support Policy]: https://robotology.github.io/gym-ignition/master/installation/support_policy.html
## Description
A clear and concise description of the bug and what should be the expected behavior.
## Steps to reproduce
1.
1.
1.
or
```python
# Insert some code
```
## Additional context
Title
Content
## Environment
- OS:
- GPU:
- Python:
- Version:
- Channel:
- [ ] Stable
- [ ] Nightly
- Installation type:
- [ ] User
- [ ] Developer
================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.md
================================================
---
name: Feature request
about: Request a new feature
labels: issue::type::enhancement
---
⚠️ If you're not sure about the specifics of the feature, or would like a broader discussion, please consider posting a proposal to [Discussions][Discussions] instead.
[Discussions]: https://github.com/robotology/gym-ignition/discussions
## Summary
Describe what this new feature is about and the context you would find it useful.
## Implementation suggestion
Provide a suggestion on how to implement this feature, which could help us to speed up the implementation.
## Additional context
Screenshots, videos, links, sketches, ...
================================================
FILE: .github/release.yml
================================================
changelog:
exclude:
labels:
- pr::changelog::ignore
authors: [ ]
categories:
- title: Bug fixing and new features
labels:
- pr::changelog::bugfix
- pr::changelog::release
- pr::changelog::feature
- title: Documentation, CI/CD, tests
labels:
- pr::changelog::docs
- pr::changelog::cicd
- pr::changelog::tests
- title: Other changes
labels:
- pr::changelog::other
- title: No category
labels: [ "*" ]
================================================
FILE: .github/workflows/cicd.yml
================================================
name: CI/CD
on:
push:
branches: ["**"]
tags-ignore: ["**"]
pull_request:
workflow_dispatch:
release:
types: [published]
jobs:
# =============
build-colcon:
# =============
name: 'colcon@${{ matrix.ignition }}'
if: |
(github.event_name == 'push' && github.ref != 'refs/heads/master') ||
(github.event_name == 'pull_request' && github.event.pull_request.base.ref != 'master')
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
ignition:
# - dome
# - edifice
- fortress
env:
CCACHE_DIR: ${{ github.workspace }}/.ccache
steps:
- name: '🔍️ Inspect Environment'
run: |
env | grep ^GITHUB
echo ""
cat ${GITHUB_EVENT_PATH}
echo ""
env
- name: '⬇️️ Install dependencies'
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
lz4 \
git \
wget \
gpg-agent \
ninja-build \
build-essential \
software-properties-common
- name: '🐍 Initialize Python'
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: '🚚 Compilation cache'
uses: actions/cache@v2
with:
path: ${{ env.CCACHE_DIR }}
# We include the commit sha in the cache key, as new cache entries are
# only created if there is no existing entry for the key yet.
key: ${{ runner.os }}-ccache-${{ matrix.ignition }}-${{ github.sha }}
# Restore any ccache cache entry, if none for the key above exists
restore-keys: ${{ runner.os }}-ccache-${{ matrix.ignition }}
- name: '🚚 Enable ccache'
run: |
sudo apt-get update
sudo apt-get install -y ccache
echo "/usr/lib/ccache" >> $GITHUB_PATH
ccache --set-config=max_size=5.0G
ccache --set-config=sloppiness=file_macro,locale,time_macros
ccache -p
ccache -s
- name: '⚙️ Add osrf ppa'
run: |
sudo sh -c 'echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-stable `lsb_release -cs` main" >\
/etc/apt/sources.list.d/gazebo-stable.list'
wget http://packages.osrfoundation.org/gazebo.key -O - | sudo apt-key add -
sudo apt-get update
- name: '⚙️ Prepare colcon workspace'
run: |
pip install vcstool colcon-common-extensions
sudo mkdir -p /workspace/src /workspace/install
sudo chmod -R a+rw /workspace
cd /workspace/src
wget -O - ${TAGS_YAML} | vcs import
echo $(sort -u $(find . -iname 'packages-'$(lsb_release -cs)'.apt' -o -iname 'packages.apt') | grep -v -E "^libignition|^libsdformat" | tr '\n' ' ') \
> /workspace/install/pkg.txt
xargs -a /workspace/install/pkg.txt sudo apt-get install -y --no-install-recommends
env:
TAGS_YAML: https://raw.githubusercontent.com/ignition-tooling/gazebodistro/master/collection-${{ matrix.ignition }}.yaml
- name: '🏗️ Build colcon workspace'
run: |
cd /workspace
colcon graph
colcon build \
--cmake-args \
-GNinja \
-DBUILD_TESTING:BOOL=OFF \
-DCMAKE_BUILD_TYPE=Debug \
--merge-install
- name: '📈 Ccache stats'
run: ccache --show-stats
- name: '📦️ Compress the workspace'
run: tar -I lz4 -cf /tmp/workspace_install.tar.lz4 /workspace/install
- name: '⬆️ Upload the workspace'
uses: actions/upload-artifact@v2
with:
path: /tmp/workspace_install.tar.lz4
name: workspace-${{ matrix.ignition }}
retention-days: 1
# ===============
build-and-test:
# ===============
name: 'Build and Test [${{matrix.type}}|${{matrix.ignition}}|${{matrix.python}}]'
if: always()
needs: [ build-colcon ]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
type:
- User
- Developer
ignition:
# - dome
# - edifice
- fortress
python:
- 3.8
- 3.9
steps:
- name: '🔍 Inspect Environment'
run: |
env | grep ^GITHUB
echo ""
cat ${GITHUB_EVENT_PATH}
echo ""
env
- name: '⬇️ Install build dependencies'
if: contains(matrix.os, 'ubuntu')
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
lz4 \
git \
wget \
cmake \
gpg-agent \
ninja-build \
build-essential \
software-properties-common
- name: '🐍 Initialize Python'
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: '🔀 Clone repository'
uses: actions/checkout@master
- name: '🔀 Download all refs'
run: git fetch --prune --unshallow
# ================
# Install Ignition
# ================
- name: '⚙️ Add osrf ppa'
if: contains(matrix.os, 'ubuntu')
run: |
sudo sh -c 'echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-stable `lsb_release -cs` main" >\
/etc/apt/sources.list.d/gazebo-stable.list'
wget http://packages.osrfoundation.org/gazebo.key -O - | sudo apt-key add -
sudo apt-get update
- name: '[🔒️|stable] Install Ignition from ppa'
if: |
contains(matrix.os, 'ubuntu') && (
github.event_name == 'release' ||
github.ref == 'refs/heads/master' ||
(github.event_name == 'pull_request' && github.event.pull_request.base.ref == 'master')
)
run: sudo apt-get install -y --no-install-recommends ignition-${{ matrix.ignition }}
- name: '[🧪|nightly] Download pre-built colcon workspace'
uses: actions/download-artifact@v2
if: |
contains(matrix.os, 'ubuntu') && (
(github.event_name == 'push' && github.ref != 'refs/heads/master') ||
(github.event_name == 'pull_request' && github.event.pull_request.base.ref != 'master')
)
with:
path: /tmp
name: workspace-${{ matrix.ignition }}
- name: '[🧪|nightly] Setup colcon workspace'
if: |
contains(matrix.os, 'ubuntu') && (
(github.event_name == 'push' && github.ref != 'refs/heads/master') ||
(github.event_name == 'pull_request' && github.event.pull_request.base.ref != 'master')
)
run: |
sudo tar -I lz4 -xf /tmp/workspace_install.tar.lz4 -C /
xargs -a /workspace/install/pkg.txt sudo apt-get install -y --no-install-recommends
echo "source /workspace/install/setup.bash" | sudo tee -a /etc/bash.bashrc
# =============
# Build project
# =============
# This is required because ScenarIO needs to import the iDynTree targets
- name: '⬇️ Install iDynTree dependencies'
if: contains(matrix.os, 'ubuntu')
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
libxml2-dev coinor-libipopt-dev libeigen3-dev libassimp-dev swig
- name: '🤖 Install iDynTree'
run: |
pip install idyntree
IDYNTREE_PYTHON_PKG=$(python3 -c 'import idyntree, pathlib; print(pathlib.Path(idyntree.__file__).parent)')
echo "CMAKE_PREFIX_PATH=$IDYNTREE_PYTHON_PKG" >> $GITHUB_ENV
# Note: In order to execute the setup.sh script, the file /etc/bash.bashrc must be sourced.
# To do that, we change the shell to a bash interactive session with 'bash -i -e'.
# Developer installation
- name: '[👷|developer] Build and Install C++ ScenarIO'
if: matrix.type == 'Developer'
shell: bash -i -e {0}
run: |
env
cmake -S . -B build/ \
-GNinja \
-DCMAKE_BUILD_TYPE=Debug \
-DIGNITION_DISTRIBUTION=$(python3 -c "print('${{ matrix.ignition }}'.capitalize())")
sudo cmake --build build/ --target install
- name: '[👷|developer] Install Python ScenarIO'
if: matrix.type == 'Developer'
run: pip install -e ./scenario
# User installation
- name: '[👤|user] Create wheel (default ignition)'
if: matrix.type == 'User' && matrix.ignition == 'fortress'
shell: bash -i -e {0}
run: pip wheel --use-feature=in-tree-build -v -w dist/ ./scenario
# Note: Calling "pip wheel" with "--global-option" forces all dependencies to be built from their sdist.
# Since it's very slow, we create the wheel from setup.py without isolation (requires system deps).
- name: '[👤|user] Create wheel (custom ignition)'
if: matrix.type == 'User' && matrix.ignition != 'fortress'
shell: bash -i -e {0}
run: |
pip install wheel setuptools-scm cmake-build-extension
python3 ./scenario/setup.py bdist_wheel \
build_ext -DIGNITION_DISTRIBUTION=$(python3 -c "print('${{ matrix.ignition }}'.capitalize())")
- name: '[👤|user] Install local wheel'
if: matrix.type == 'User'
run: pip install -v dist/scenario-*.whl
- name: '🔍️ Inspect installed ScenarIO package'
if: matrix.type == 'User' && contains(matrix.os, 'ubuntu')
run: |
sudo apt-get install -y --no-install-recommends tree
tree $(python3 -c "import scenario, pathlib; print(pathlib.Path(scenario.__file__).parent)")
# ====================
# Install gym-ignition
# ====================
- name: '🐍 Install gym-ignition'
run: pip install wheel && pip install .[all]
# ============
# Test project
# ============
- name: '🔍 Inspect installed versions'
run: pip list | grep -E "^scenario|^gym-ignition"
- name: '[🐍|scenario] Python Tests'
shell: bash -i -e {0}
run: pytest -m "scenario"
- name: '[🚨|scenario] Python Tests with Valgrind'
shell: bash -i -e {0}
if: failure()
run: |
sudo apt-get install -y --no-install-recommends valgrind
pip install colour-valgrind
valgrind --log-file=/tmp/valgrind.log pytest -m "scenario" || colour-valgrind -t /tmp/valgrind.log
- name: '[🐍|gym-ignition] Python Tests'
shell: bash -i -e {0}
run: pytest -m "gym_ignition"
- name: '[🚨|gym-ignition] Python Tests with Valgrind'
shell: bash -i -e {0}
if: failure()
run: |
sudo apt-get install -y --no-install-recommends valgrind
pip install colour-valgrind
valgrind --log-file=/tmp/valgrind.log pytest -m "gym_ignition" || colour-valgrind -t /tmp/valgrind.log
# ============================
# Upload artifacts (only User)
# ============================
- name: '🗑️ Remove external wheels'
if: matrix.type == 'User'
run: find dist/ -type f -not -name 'scenario-*' -delete -print
# We have to trick PyPI that our wheels are manylinux2014 even if they are not.
# Unfortunately we cannot create self-contained wheels (neither the PEP600 perennial)
# due to the Ignition architecture.
- name: '📝 Rename scenario wheel'
if: matrix.type == 'User' && contains(matrix.os, 'ubuntu')
run: |
ls dist/
find dist/ -type f -name "*.whl" -exec rename.ul linux manylinux2014 {} +
ls dist/
- name: '🔍 Inspect dist folder'
if: matrix.type == 'User'
run: ls -lah dist/
- name: '⬆️ Upload artifacts'
uses: actions/upload-artifact@v2
if: matrix.type == 'User' && matrix.ignition == 'fortress'
with:
path: dist/*
name: dist
# =======
# Website
# =======
- name: '⬇️ Install website dependencies'
run: |
sudo apt-get update
sudo apt-get install -y doxygen texlive-font-utils
- name: '🔍 Inspect metadata'
run: sphinx-multiversion --dump-metadata docs/sphinx/ build/
# This is necessary because otherwise the check for uncommitted apidoc
# only detects additions, not removals.
- name: '🗑️ Remove apidoc folder'
run: rm -r docs/sphinx/apidoc
- name: '🏗️ Build sphinx website'
shell: bash -i -e {0}
run: |
[[ -d build ]] && sudo chown -R $(id -u):$(id -g) build/
cmake -S . -B build/ -DBUILD_DOCS:BOOL=ON
cmake --build build/ --target sphinx
- name: '🔍 Check new uncommitted apidoc'
run: test $(git status --porcelain | wc -l) -eq 0
- name: '🔍 git status'
if: ${{ failure() }}
run: |
git status
git diff
- name: '⬆️ Upload website folder'
uses: actions/upload-artifact@v2
if: matrix.type == 'User' && matrix.ignition == 'fortress' && contains(matrix.os, 'ubuntu')
with:
path: build/html
name: website
retention-days: 1
# ===================
website-deployment:
# ===================
name: 'Website Deployment'
if: always() && needs.build-and-test.result == 'success'
needs: [ build-and-test ]
runs-on: ubuntu-latest
steps:
- name: '⬇️ Download website folder'
uses: actions/download-artifact@v2
with:
path: build/html
name: website
- name: '🔍 Inspect html folder'
run: ls -lah build/html
- name: '🚀 Deploy'
uses: JamesIves/github-pages-deploy-action@releases/v3
if: |
github.event_name == 'push' &&
github.repository == 'robotology/gym-ignition' &&
(github.ref == 'refs/heads/master' || github.ref == 'refs/heads/devel')
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BRANCH: gh-pages
FOLDER: build/html
CLEAN: true
CLEAN_EXCLUDE: '[".gitignore", ".nojekyll"]'
# =================
python-packaging:
# =================
name: 'Python packaging'
# Run only after the whole build-and-test matrix succeeded.
if: always() && needs.build-and-test.result == 'success'
needs: [ build-and-test ]
runs-on: ubuntu-latest
steps:
# Dump the GitHub context to ease debugging of trigger conditions.
- name: '🔍️ Inspect Environment'
run: |
env | grep ^GITHUB
echo ""
cat ${GITHUB_EVENT_PATH}
echo ""
env
# Any Python3 version is ok in this job
- name: '🐍 Initialize Python'
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: '🔀 Clone repository'
uses: actions/checkout@master
# The full history is needed to compute the package version from git tags.
- name: '🔀 Download all refs'
run: git fetch --prune --unshallow
# ===============
# Download wheels
# ===============
# The dist artifact was uploaded earlier by the build-and-test job.
- name: '⬇️ Download scenario wheels'
uses: actions/download-artifact@v2
with:
path: dist
name: dist
- name: '🔍 Inspect dist folder'
run: ls -lah dist/
# ===============
# Create packages
# ===============
# We use build to create sdist. Also pipx would work.
# https://github.com/pypa/build
# Compared to calling setup.py, the advantage of these tools is that
# they automatically handle the build dependencies.
- name: '🐍️️ Install dependencies'
run: pip install build
- name: '[📦|scenario]️ Create sdist'
run: python -m build --sdist scenario/ -o dist/
# gym-ignition is pure Python: both sdist and wheel are built here.
- name: '[📦|gym-ignition]️ Create sdist and wheel'
run: python -m build .
# ================
# Upload artifacts
# ================
# Keep only scenario and gym_ignition packages in dist/ before uploading.
- name: '🗑️ Remove external packages'
run: find dist/ -type f -not \( -name '*scenario-*' -o -name '*gym_ignition-*' \) -delete -print
# twine check validates that the packages will render correctly on PyPI.
- name: '🔍 Check packages'
run: pipx run twine check dist/*
- name: '🔍 Inspect dist folder'
run: ls -lah dist/
- name: '⬆️ Upload artifacts'
uses: actions/upload-artifact@v2
with:
path: dist/*
name: dist
# ============
upload_pypi:
# ============
name: Publish to PyPI
# Run only if both the build and the packaging jobs succeeded.
if: always() && needs.build-and-test.result == 'success' && needs.python-packaging.result == 'success'
needs:
- build-and-test
- python-packaging
runs-on: ubuntu-latest
# Devel branch produces pre-releases.
# GitHub releases produce stable releases.
steps:
# Needed only to extract from the git repo the last revision
- name: '🔀 Clone repository'
uses: actions/checkout@master
- name: '🔀 Download all refs'
run: git fetch --prune --unshallow
- name: '⬇️ Download Python packages'
uses: actions/download-artifact@v2
with:
path: dist
name: dist
- name: '🔍 Inspect dist folder'
run: ls -lah dist/
# Validate the last tag accordingly to PEP440
# From https://stackoverflow.com/a/37972030/12150968
- name: '📌 Check PEP440 compliance'
if: github.event_name == 'release'
run: |
sudo apt-get update
# source-highlight ships the check-regexp helper used below.
sudo apt-get install -y source-highlight
last_tag_with_v="$(git describe --abbrev=0 --tags)"
# Strip the leading 'v' from the tag, if present.
last_tag=${last_tag_with_v#v}
rel_regexp='^(\d+!)?(\d+)(\.\d+)+([\.\-\_])?((a(lpha)?|b(eta)?|c|r(c|ev)?|pre(view)?)\d*)?(\.?(post|dev)\d*)?$'
echo ""
echo $last_tag
echo ""
check-regexp ${rel_regexp} ${last_tag}
# Fail the step unless check-regexp reports exactly one match.
match=$(check-regexp ${rel_regexp} ${last_tag} | grep matches | cut -d ' ' -f 5)
test $match -eq 1 && true
# Publish only from the upstream repo: published GitHub releases and
# pushes to devel. skip_existing tolerates re-runs of already-uploaded
# versions.
- name: '⬆️ Publish packages to PyPI'
if: |
github.repository == 'robotology/gym-ignition' &&
((github.event_name == 'release' && github.event.action == 'published') ||
(github.event_name == 'push' && github.ref == 'refs/heads/devel'))
uses: pypa/gh-action-pypi-publish@master
with:
user: __token__
password: ${{ secrets.PYPI_TOKEN }}
skip_existing: true
================================================
FILE: .github/workflows/docker.yml
================================================
name: Docker Images
on:
# Rebuild whenever the docker setup or this workflow changes.
push:
paths:
- ".docker/*"
- ".github/workflows/docker.yml"
branches: ["**"]
tags-ignore: ["**"]
pull_request:
paths:
- ".docker/*"
- ".github/workflows/docker.yml"
workflow_dispatch:
schedule:
# Execute a weekly build on Monday at 2AM UTC
- cron: '0 2 * * 1'
jobs:
docker:
name: 'diegoferigo/gym-ignition:${{ matrix.tag }}'
runs-on: ubuntu-latest
strategy:
# Serialize the matrix: later tags build on top of earlier images.
max-parallel: 1
matrix:
baseimage: ['ubuntu:bionic']
tag:
- base
- ci-master
- ci-devel
- pypi-master
- pypi-devel
#- latest
- nightly
steps:
# Use the branch that triggered the workflow for push and pull_request events
- uses: actions/checkout@master
if: github.event_name != 'schedule'
# Use devel branch for scheduled builds
- uses: actions/checkout@master
with:
ref: 'refs/heads/devel'
if: github.event_name == 'schedule'
- name: Build
env:
TAG: ${{ matrix.tag }}
run: |
cd .docker
docker-compose build ${TAG}
# Login and push only from the upstream repository, never for PRs, and
# only for scheduled builds or pushes to the devel branch.
- name: Login
if: |
github.repository == 'robotology/gym-ignition' &&
github.event_name != 'pull_request' &&
(github.event_name == 'schedule' || github.ref == 'refs/heads/devel')
env:
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
run: echo ${DOCKER_PASSWORD} | docker login --username ${DOCKER_USERNAME} --password-stdin
- name: Push
if: |
github.repository == 'robotology/gym-ignition' &&
github.event_name != 'pull_request' &&
(github.event_name == 'schedule' || github.ref == 'refs/heads/devel')
run: docker push diegoferigo/gym-ignition:${{ matrix.tag }}
================================================
FILE: .github/workflows/style.yml
================================================
name: Code Style
on:
push:
branches: ["**"]
tags-ignore: ["**"]
pull_request:
workflow_dispatch:
jobs:
# Check C++ sources (*.h, *.cpp) against the repository's .clang-format.
clang-format:
name: clang-format
runs-on: ubuntu-latest
steps:
- name: "🔀 Checkout repository"
uses: actions/checkout@v2
- name: "⬇️️ Install dependencies"
run: sudo apt-get install -y --no-install-recommends colordiff
- name: "📝 Format C++"
uses: diegoferigo/gh-action-clang-format@main
id: format
with:
style: file
pattern: |
*.h
*.cpp
# On failure, print the formatting diff with colors for easier reading.
- name: "🔍️ Inspect style diff"
if: failure()
run: printf "${{ steps.format.outputs.diff }}" | colordiff
# Check Python formatting with black (check-only, no files modified).
black:
name: black
runs-on: ubuntu-latest
steps:
- name: "🔀 Checkout repository"
uses: actions/checkout@v2
- name: '🐍 Initialize Python'
uses: actions/setup-python@v2
with:
python-version: "3.8"
- name: "📝 Black Code Formatter"
uses: psf/black@stable
with:
options: --check --diff --color
# Check Python import ordering with isort (check-only).
isort:
name: isort
runs-on: ubuntu-latest
steps:
- name: "🔀 Checkout repository"
uses: actions/checkout@v2
- name: '🐍 Initialize Python'
uses: actions/setup-python@v2
with:
python-version: "3.8"
- name: "📝 isort"
uses: isort/isort-action@master
with:
configuration: --check --diff --color
================================================
FILE: .gitignore
================================================
*.mexa64
*.mexmaci64
*.autosave
*.original
*~
.DS_Store
build*
*.bak
*.old
CMakeLists.txt.*
Copy_of_*
__pycache__
*.egg-info
*.cache
.idea*
**/dist/*
.egg*
================================================
FILE: CMakeLists.txt
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# This file is here only to allow build systems to find the
# real CMake project that is stored in the scenario/ folder.
cmake_minimum_required(VERSION 3.16)
project(scenario)
add_subdirectory(scenario)
# The uninstall target resource must be included in the top-level CMakeLists
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/scenario/cmake)
include(AddUninstallTarget)
# =============
# DOCUMENTATION
# =============
# BUILD_DOCS is disabled unless explicitly enabled at configure time,
# e.g. with -DBUILD_DOCS:BOOL=ON (as done by the CI website job).
if(BUILD_DOCS)
add_subdirectory(docs)
endif()
================================================
FILE: LICENSE
================================================
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc.
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
================================================
FILE: README.md
================================================
gym-ignition
> ⚠️ **Warning** ⚠️
>
> This project is no longer actively maintained, and development has stalled.
> For an in-depth description of the current status and actionable steps to revive development, please consult [robotology/gym-ignition#430](https://github.com/robotology/gym-ignition/issues/430).
||||
|:---:|:---:|:---:|
| ![][pendulum] | ![][panda] | ![][icub] |
[icub]: https://user-images.githubusercontent.com/469199/99262746-9e021a80-281e-11eb-9df1-d70134b0801a.png
[panda]: https://user-images.githubusercontent.com/469199/99263111-0cdf7380-281f-11eb-9cfe-338b2aae0503.png
[pendulum]: https://user-images.githubusercontent.com/469199/99262383-321fb200-281e-11eb-89cc-cc31f590daa3.png
## Description
**gym-ignition** is a framework to create **reproducible robotics environments** for reinforcement learning research.
It is based on the [ScenarIO](scenario/) project which provides the low-level APIs to interface with the Ignition Gazebo simulator.
By default, RL environments share a lot of boilerplate code, e.g. for initializing the simulator or structuring the classes
to expose the `gym.Env` interface.
Gym-ignition provides the [`Task`](python/gym_ignition/base/task.py) and [`Runtime`](python/gym_ignition/base/runtime.py)
abstractions that help you focusing on the development of the decision-making logic rather than engineering.
It includes [randomizers](python/gym_ignition/randomizers) to simplify the implementation of domain randomization
of models, physics, and tasks.
Gym-ignition also provides powerful dynamics algorithms compatible with both fixed-base and floating-based robots by
exploiting [robotology/idyntree](https://github.com/robotology/idyntree/) and exposing
[high-level functionalities](python/gym_ignition/rbd/idyntree).
Gym-ignition does not provide out-of-the-box environments ready to be used.
Rather, its aim is simplifying and streamlining their development.
Nonetheless, for illustrative purpose, it includes canonical examples in the
[`gym_ignition_environments`](python/gym_ignition_environments) package.
Visit the [website][website] for more information about the project.
[website]: https://robotology.github.io/gym-ignition
## Installation
1. First, follow the installation instructions of [ScenarIO](scenario/).
2. `pip install gym-ignition`, preferably in a [virtual environment](https://docs.python.org/3.8/tutorial/venv.html).
## Contributing
You can visit our community forum hosted in [GitHub Discussions](https://github.com/robotology/gym-ignition/discussions).
Even without coding skills, replying user's questions is a great way of contributing.
If you use gym-ignition in your application and want to show it off, visit the
[Show and tell](https://github.com/robotology/gym-ignition/discussions/categories/show-and-tell) section!
You can advertise there your environments created with gym-ignition.
Pull requests are welcome.
For major changes, please open a [discussion](https://github.com/robotology/gym-ignition/discussions)
first to propose what you would like to change.
## Citation
```bibtex
@INPROCEEDINGS{ferigo2020gymignition,
title={Gym-Ignition: Reproducible Robotic Simulations for Reinforcement Learning},
author={D. {Ferigo} and S. {Traversaro} and G. {Metta} and D. {Pucci}},
booktitle={2020 IEEE/SICE International Symposium on System Integration (SII)},
year={2020},
pages={885-890},
doi={10.1109/SII46433.2020.9025951}
}
```
## License
[LGPL v2.1](https://choosealicense.com/licenses/lgpl-2.1/) or any later version.
---
**Disclaimer:** Gym-ignition is an independent project and is not related by any means to OpenAI and Open Robotics.
================================================
FILE: docs/CMakeLists.txt
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# This variable is filled from the doxygen/ folder.
# Defined here empty to remember that it exists in this scope.
set(DOXYGEN_OUTPUT_DIRECTORY "")
# Order matters: sphinx/ consumes DOXYGEN_OUTPUT_DIRECTORY, which is
# exported to this scope by doxygen/ via PARENT_SCOPE.
add_subdirectory(doxygen)
add_subdirectory(sphinx)
================================================
FILE: docs/doxygen/CMakeLists.txt
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
find_package(Doxygen REQUIRED)
# Only XML (consumed by Breathe/Sphinx) and LaTeX outputs are generated;
# the Doxygen HTML output is disabled.
set(DOXYGEN_GENERATE_XML YES)
set(DOXYGEN_GENERATE_HTML NO)
set(DOXYGEN_GENERATE_LATEX YES)
# Parse only the public headers.
set(DOXYGEN_FILE_PATTERNS "*.h")
set(DOXYGEN_BUILTIN_STL_SUPPORT YES)
# Skip implementation details, examples, tests, and the legacy gympp code.
set(DOXYGEN_EXCLUDE_PATTERNS
*/components/* */plugins/* */details/* */private/*
*/examples/* */tests/* */gympp/*)
# Fix: the previous COMMENT claimed "Generate man pages", but
# DOXYGEN_GENERATE_MAN is never enabled — this target produces XML/LaTeX.
doxygen_add_docs(
doxygen
${PROJECT_SOURCE_DIR}
COMMENT "Generating Doxygen XML documentation"
)
# This is required by Sphinx to find Doxygen XMLs
set(DOXYGEN_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} PARENT_SCOPE)
================================================
FILE: docs/sphinx/CMakeLists.txt
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# Useful folders:
#
# 1. The folder containing the Python packages
# 2. The folder containing the bindings Python modules
#
set(PYTHON_PACKAGES_DIR "${CMAKE_SOURCE_DIR}/python")
set(BINDINGS_MODULES_DIR "${PROJECT_BINARY_DIR}/scenario/bindings")
# =============
# APIDOC TARGET
# =============
# This target generates the apidoc files of the Python modules and stores
# them in the docs/sphinx/ source folder.
# In order to make the multiversion website work, they need to be committed
# when they change.
#
# The files in the following folders are replaced:
#
# - docs/sphinx/scenario
# - docs/sphinx/gym_ignition
# - docs/sphinx/gym_ignition_environments
# The SWIG bindings must have been built: sphinx-apidoc imports them.
if (NOT TARGET ScenarioSwig::Core)
message(FATAL_ERROR "Target ScenarioSwig::Core not found")
endif()
if (NOT TARGET ScenarioSwig::Gazebo)
message(FATAL_ERROR "Target ScenarioSwig::Gazebo not found")
endif()
find_package(SphinxApidoc REQUIRED)
add_custom_target(apidoc ALL DEPENDS
doxygen ScenarioSwig::Core ScenarioSwig::Gazebo)
# Generate the rst files of the gym_ignition package.
add_custom_command(
TARGET apidoc POST_BUILD
COMMAND
${SPHINX_APIDOC_EXECUTABLE} --force
--no-toc --module-first --maxdepth 4
-t ${CMAKE_CURRENT_SOURCE_DIR}/_templates
-o ${CMAKE_CURRENT_SOURCE_DIR}/apidoc/gym-ignition
${PYTHON_PACKAGES_DIR}/gym_ignition
COMMENT "Building apidoc")
# Generate the rst files of the gym_ignition_environments package.
add_custom_command(
TARGET apidoc POST_BUILD
COMMAND
${SPHINX_APIDOC_EXECUTABLE} --force
--no-toc --module-first --maxdepth 4
-t ${CMAKE_CURRENT_SOURCE_DIR}/_templates
-o ${CMAKE_CURRENT_SOURCE_DIR}/apidoc/gym-ignition-environments
${PYTHON_PACKAGES_DIR}/gym_ignition_environments
COMMENT "Building apidoc")
# Generate the rst files of the scenario bindings modules.
add_custom_command(
TARGET apidoc POST_BUILD
COMMAND
${SPHINX_APIDOC_EXECUTABLE} --force
--no-toc --module-first --maxdepth 4
-t ${CMAKE_CURRENT_SOURCE_DIR}/_templates
-o ${CMAKE_CURRENT_SOURCE_DIR}/apidoc/scenario
${BINDINGS_MODULES_DIR}
COMMENT "Building apidoc")
# =============
# SPHINX TARGET
# =============
find_package(Sphinx REQUIRED)
find_package(SphinxMultiVersion REQUIRED)
# Fail early if the doxygen/ subdirectory did not export its output
# directory (Breathe needs it to locate the Doxygen XML files).
# Fix: the expansion must be quoted — with the previous unquoted form,
# an empty DOXYGEN_OUTPUT_DIRECTORY expanded to no argument at all,
# making if() a syntax error exactly in the case this guard must catch
# (see also CMake policy CMP0054 on variable re-expansion in if()).
if("${DOXYGEN_OUTPUT_DIRECTORY}" STREQUAL "")
message(FATAL_ERROR "Doxygen was not configured properly")
endif()
set(SPHINX_BUILD ${CMAKE_BINARY_DIR}/html)
set(SPHINX_SOURCE ${CMAKE_CURRENT_SOURCE_DIR})
# Extend the Python path including before generating the website
# (sphinx-multiversion must import both the packages and the bindings).
set(SPHINX_CMD export PYTHONPATH="${PYTHON_PACKAGES_DIR}:${BINDINGS_MODULES_DIR}:$ENV{PYTHONPATH}" &&)
# Sphinx build command
list(APPEND SPHINX_CMD
${SPHINX_MULTIVERSION_EXECUTABLE}
${SPHINX_SOURCE} ${SPHINX_BUILD}
-D breathe_projects.scenario="${DOXYGEN_OUTPUT_DIRECTORY}/xml")
# Generate the website
add_custom_target(sphinx ALL
DEPENDS apidoc
COMMAND ${SPHINX_CMD}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
COMMENT "Generating documentation with Sphinx")
# Disable GitHub pages autogeneration
add_custom_command(
TARGET sphinx POST_BUILD
COMMAND ${CMAKE_COMMAND} -E touch "${SPHINX_BUILD}/.nojekyll"
COMMENT "Disabling Jekyll in html folder")
# Handle redirect
add_custom_command(
TARGET sphinx POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy "${CMAKE_CURRENT_SOURCE_DIR}/index.html" "${SPHINX_BUILD}"
COMMENT "Copying html redirect to html folder")
================================================
FILE: docs/sphinx/_templates/module.rst_t
================================================
{%- if show_headings %}
{{- [basename, "module"] | join(' ') | e | heading }}
{% endif -%}
.. automodule:: {{ qualname }}
{%- for option in automodule_options %}
:{{ option }}:
{%- endfor %}
================================================
FILE: docs/sphinx/_templates/package.rst_t
================================================
{%- macro automodule(modname, options) -%}
.. automodule:: {{ modname }}
{%- for option in options %}
:{{ option }}:
{%- endfor %}
{%- endmacro %}
{%- macro toctree(docnames) -%}
.. toctree::
:maxdepth: {{ maxdepth }}
{% for docname in docnames %}
{{ docname }}
{%- endfor %}
{%- endmacro %}
{%- if is_namespace %}
{{- [pkgname, "namespace"] | join(" ") | e | heading }}
{% else %}
{{- pkgname | e | heading }}
{% endif %}
{%- if modulefirst and not is_namespace %}
{{ automodule(pkgname, automodule_options) }}
{% endif %}
{%- if subpackages %}
{{ toctree(subpackages) }}
{% endif %}
{%- if submodules %}
{% if separatemodules %}
{{ toctree(submodules) }}
{% else %}
{%- for submodule in submodules %}
{% if show_headings %}
{{- submodule | e | heading(2) }}
{% endif %}
{{ automodule(submodule, automodule_options) }}
{% endfor %}
{%- endif %}
{%- endif %}
{%- if not modulefirst and not is_namespace %}
Module contents
---------------
{{ automodule(pkgname, automodule_options) }}
{% endif %}
================================================
FILE: docs/sphinx/_templates/toc.rst_t
================================================
{{ header | heading }}
.. toctree::
:maxdepth: {{ maxdepth }}
{% for docname in docnames %}
{{ docname }}
{%- endfor %}
================================================
FILE: docs/sphinx/_templates/versioning.html
================================================
{% if versions %}
{% endif %}
================================================
FILE: docs/sphinx/_templates/versions.html
================================================
{%- if current_version %}
{%- endif %}
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.base.rst
================================================
gym\_ignition.base
==================
.. automodule:: gym_ignition.base
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.base.runtime
--------------------------
.. automodule:: gym_ignition.base.runtime
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.base.task
-----------------------
.. automodule:: gym_ignition.base.task
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.context.gazebo.rst
================================================
gym\_ignition.context.gazebo
============================
.. automodule:: gym_ignition.context.gazebo
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.context.gazebo.controllers
----------------------------------------
.. automodule:: gym_ignition.context.gazebo.controllers
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.context.gazebo.plugin
-----------------------------------
.. automodule:: gym_ignition.context.gazebo.plugin
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.context.rst
================================================
gym\_ignition.context
=====================
.. automodule:: gym_ignition.context
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 4
gym_ignition.context.gazebo
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.randomizers.model.rst
================================================
gym\_ignition.randomizers.model
===============================
.. automodule:: gym_ignition.randomizers.model
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.randomizers.model.sdf
-----------------------------------
.. automodule:: gym_ignition.randomizers.model.sdf
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.randomizers.physics.rst
================================================
gym\_ignition.randomizers.physics
=================================
.. automodule:: gym_ignition.randomizers.physics
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.randomizers.physics.dart
--------------------------------------
.. automodule:: gym_ignition.randomizers.physics.dart
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.randomizers.rst
================================================
gym\_ignition.randomizers
=========================
.. automodule:: gym_ignition.randomizers
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 4
gym_ignition.randomizers.model
gym_ignition.randomizers.physics
gym\_ignition.randomizers.abc
-----------------------------
.. automodule:: gym_ignition.randomizers.abc
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.randomizers.gazebo\_env\_randomizer
-------------------------------------------------
.. automodule:: gym_ignition.randomizers.gazebo_env_randomizer
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.rbd.idyntree.rst
================================================
gym\_ignition.rbd.idyntree
==========================
.. automodule:: gym_ignition.rbd.idyntree
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.rbd.idyntree.helpers
----------------------------------
.. automodule:: gym_ignition.rbd.idyntree.helpers
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.rbd.idyntree.inverse\_kinematics\_nlp
---------------------------------------------------
.. automodule:: gym_ignition.rbd.idyntree.inverse_kinematics_nlp
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.rbd.idyntree.kindyncomputations
---------------------------------------------
.. automodule:: gym_ignition.rbd.idyntree.kindyncomputations
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.rbd.idyntree.numpy
--------------------------------
.. automodule:: gym_ignition.rbd.idyntree.numpy
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.rbd.rst
================================================
gym\_ignition.rbd
=================
.. automodule:: gym_ignition.rbd
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 4
gym_ignition.rbd.idyntree
gym\_ignition.rbd.conversions
-----------------------------
.. automodule:: gym_ignition.rbd.conversions
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.rbd.utils
-----------------------
.. automodule:: gym_ignition.rbd.utils
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.rst
================================================
gym\_ignition
=============
.. automodule:: gym_ignition
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 4
gym_ignition.base
gym_ignition.context
gym_ignition.randomizers
gym_ignition.rbd
gym_ignition.runtimes
gym_ignition.scenario
gym_ignition.utils
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.runtimes.rst
================================================
gym\_ignition.runtimes
======================
.. automodule:: gym_ignition.runtimes
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.runtimes.gazebo\_runtime
--------------------------------------
.. automodule:: gym_ignition.runtimes.gazebo_runtime
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.runtimes.realtime\_runtime
----------------------------------------
.. automodule:: gym_ignition.runtimes.realtime_runtime
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.scenario.rst
================================================
gym\_ignition.scenario
======================
.. automodule:: gym_ignition.scenario
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.scenario.model\_with\_file
----------------------------------------
.. automodule:: gym_ignition.scenario.model_with_file
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.scenario.model\_wrapper
-------------------------------------
.. automodule:: gym_ignition.scenario.model_wrapper
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition/gym_ignition.utils.rst
================================================
gym\_ignition.utils
===================
.. automodule:: gym_ignition.utils
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.utils.logger
--------------------------
.. automodule:: gym_ignition.utils.logger
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.utils.math
------------------------
.. automodule:: gym_ignition.utils.math
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.utils.misc
------------------------
.. automodule:: gym_ignition.utils.misc
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.utils.resource\_finder
------------------------------------
.. automodule:: gym_ignition.utils.resource_finder
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.utils.scenario
----------------------------
.. automodule:: gym_ignition.utils.scenario
:members:
:undoc-members:
:show-inheritance:
gym\_ignition.utils.typing
--------------------------
.. automodule:: gym_ignition.utils.typing
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition-environments/gym_ignition_environments.models.rst
================================================
gym\_ignition\_environments.models
==================================
.. automodule:: gym_ignition_environments.models
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.models.cartpole
-------------------------------------------
.. automodule:: gym_ignition_environments.models.cartpole
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.models.icub
---------------------------------------
.. automodule:: gym_ignition_environments.models.icub
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.models.panda
----------------------------------------
.. automodule:: gym_ignition_environments.models.panda
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.models.pendulum
-------------------------------------------
.. automodule:: gym_ignition_environments.models.pendulum
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition-environments/gym_ignition_environments.randomizers.rst
================================================
gym\_ignition\_environments.randomizers
=======================================
.. automodule:: gym_ignition_environments.randomizers
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.randomizers.cartpole
------------------------------------------------
.. automodule:: gym_ignition_environments.randomizers.cartpole
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.randomizers.cartpole\_no\_rand
----------------------------------------------------------
.. automodule:: gym_ignition_environments.randomizers.cartpole_no_rand
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/gym-ignition-environments/gym_ignition_environments.rst
================================================
gym\_ignition\_environments
===========================
.. automodule:: gym_ignition_environments
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 4
gym_ignition_environments.models
gym_ignition_environments.randomizers
gym_ignition_environments.tasks
================================================
FILE: docs/sphinx/apidoc/gym-ignition-environments/gym_ignition_environments.tasks.rst
================================================
gym\_ignition\_environments.tasks
=================================
.. automodule:: gym_ignition_environments.tasks
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.tasks.cartpole\_continuous\_balancing
-----------------------------------------------------------------
.. automodule:: gym_ignition_environments.tasks.cartpole_continuous_balancing
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.tasks.cartpole\_continuous\_swingup
---------------------------------------------------------------
.. automodule:: gym_ignition_environments.tasks.cartpole_continuous_swingup
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.tasks.cartpole\_discrete\_balancing
---------------------------------------------------------------
.. automodule:: gym_ignition_environments.tasks.cartpole_discrete_balancing
:members:
:undoc-members:
:show-inheritance:
gym\_ignition\_environments.tasks.pendulum\_swingup
---------------------------------------------------
.. automodule:: gym_ignition_environments.tasks.pendulum_swingup
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/scenario/scenario.bindings.rst
================================================
scenario.bindings
=================
.. automodule:: scenario.bindings
:members:
:undoc-members:
:show-inheritance:
scenario.bindings.core
----------------------
.. automodule:: scenario.bindings.core
:members:
:undoc-members:
:show-inheritance:
scenario.bindings.gazebo
------------------------
.. automodule:: scenario.bindings.gazebo
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/sphinx/apidoc/scenario/scenario.rst
================================================
scenario
========
.. automodule:: scenario
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 4
scenario.bindings
================================================
FILE: docs/sphinx/breathe/core.rst
================================================
.. _scenario_core:
Core
====
.. doxygennamespace:: scenario::core
:project: scenario
:members:
================================================
FILE: docs/sphinx/breathe/gazebo.rst
================================================
.. _scenario_gazebo:
Gazebo
======
.. doxygennamespace:: scenario::gazebo
:project: scenario
:members:
================================================
FILE: docs/sphinx/conf.py
================================================
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
from datetime import datetime
project = "gym-ignition"
# Evaluate the year at build time so the copyright never goes stale.
copyright = f"{datetime.now().year}, Istituto Italiano di Tecnologia"
author = "Diego Ferigo"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.todo",
"sphinx.ext.mathjax",
"sphinx.ext.githubpages",
"sphinx.ext.napoleon",
"sphinx.ext.extlinks",
"sphinx_autodoc_typehints",
"sphinx_multiversion",
"sphinx_fontawesome",
# breathe bridges the Doxygen XML of the C++ ScenarIO APIs into Sphinx.
"breathe",
"sphinx_tabs.tabs",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_book_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"repository_url": "https://github.com/robotology/gym-ignition",
"use_repository_button": True,
"use_issues_button": True,
"use_edit_page_button": True,
"path_to_docs": "docs/sphinx",
"home_page_in_toc": True,
"use_download_button": False,
"use_fullscreen_button": True,
"single_page": False,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "pastie"
# -- Extension configuration -------------------------------------------------
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for breathe extension ----------------------------------------------
breathe_default_project = "scenario"
# -- Options for sphinx_multiversion extension ----------------------------------
# From: https://holzhaus.github.io/sphinx-multiversion
smv_prefer_remote_refs = False
smv_remote_whitelist = r"^(origin|upstream)$"
# NOTE(review): r"^dummy" matches no real tags, which presumably disables
# tag-based versioned builds on purpose — confirm before "fixing" it.
smv_tag_whitelist = r"^dummy"
smv_branch_whitelist = r"^(master|devel|docs/.*)$"
smv_released_pattern = r"^tags/.*$"
smv_outputdir_format = "{ref.name}"
# Sidebar layout for every page; versioning.html is the custom template that
# renders the sphinx-multiversion version picker.
html_sidebars = {
"**": [
"sbt-sidebar-nav.html",
"versioning.html",
"search-field.html",
"sbt-sidebar-footer.html",
]
}
# -- Options for extlinks extension ----------------------------------
# Enables the :issue:`N` and :pr:`N` roles, rendered as "#N" links.
extlinks = {
"issue": ("https://github.com/robotology/gym-ignition/issues/%s", "#"),
"pr": ("https://github.com/robotology/gym-ignition/pull/%s", "#"),
}
================================================
FILE: docs/sphinx/getting_started/gym-ignition.rst
================================================
.. _getting_started_gym_ignition:
gym-ignition
************
The previous sections reported the usage of ScenarIO to perform rigid-body simulations of any kind.
The :py:mod:`gym_ignition` Python package provides boilerplate code that uses ScenarIO to create environments
for reinforcement learning research compatible with OpenAI Gym.
Beyond the abstractions provided by ScenarIO, gym-ignition introduces the following:
- :py:class:`~gym_ignition.base.runtime.Runtime`: Base class to abstract the runtime of an environment.
It provides the code that steps the simulator for simulated environments or deals with real-time execution for
environments running on real robots.
The implementation for Ignition Gazebo is :py:class:`~gym_ignition.runtimes.gazebo_runtime.GazeboRuntime`.
- :py:class:`~gym_ignition.base.task.Task`: Base class providing the structure of the decision-making logic.
The code of the task must be independent from the runtime, and only the ScenarIO APIs should be used.
The active runtime will then execute the task on either simulated or real worlds.
- :py:mod:`gym_ignition.randomizers`: Utilities to develop ``gym.Wrapper`` classes that randomize the environment
every rollout.
The implementation for Ignition Gazebo is :py:class:`~gym_ignition.randomizers.gazebo_env_randomizer.GazeboEnvRandomizer`.
- :py:mod:`gym_ignition.rbd`: Utilities commonly used in robotic environments, like inverse kinematics and rigid-body
dynamics algorithms.
Refer to :py:class:`~gym_ignition.rbd.idyntree.inverse_kinematics_nlp.InverseKinematicsNLP` and
:py:class:`~gym_ignition.rbd.idyntree.kindyncomputations.KinDynComputations` for more details.
.. tip::
If you want to learn more about ``iDynTree``, the two classes we mainly use are ``iDynTree::KinDynComputations`` (`docs `__) and ``iDynTree::InverseKinematics`` (`docs `__).
The theory and notation are summarized in `Multibody dynamics notation `_.
You can find demo environments created with ``gym-ignition`` in the
`gym_ignition_environments `_ folder.
These examples show how to structure a new standalone Python package containing the environment with your robots.
For example, taking the cartpole balancing problem with discrete actions,
the components you need to implement are the following:
- A model :py:class:`~gym_ignition_environments.models.cartpole.CartPole`
(`model/cartpole.py `_)
- A task :py:class:`~gym_ignition_environments.tasks.cartpole_discrete_balancing.CartPoleDiscreteBalancing`
(`cartpole_discrete_balancing.py `_)
- A randomizer :py:class:`~gym_ignition_environments.randomizers.cartpole.CartpoleEnvRandomizer`
(`randomizers/cartpole.py `_)
- Environment registration as done in `__init__.py `_
With all these resources in place, you can run a random policy of the environment as shown in
`launch_cartpole.py `_.
================================================
FILE: docs/sphinx/getting_started/manipulation.rst
================================================
.. _getting_started_manipulation:
Manipulation example
********************
This example provides a wider overview of the functionalities currently implemented in gym-ignition.
The code reported below is taken from the example
`panda_pick_and_place.py `_.
It shows the following functionalities:
- Download models from `Ignition Fuel `_.
- Insert and remove models during runtime.
- Exploit the high-level :py:class:`~gym_ignition_environments.models.panda.Panda` helper class to insert the manipulator.
- Enable custom C++ controllers (:cpp:class:`scenario::controllers::ComputedTorqueFixedBase`).
- Detect contacts and read contact wrenches.
- Exploit :py:class:`~gym_ignition.rbd.idyntree.inverse_kinematics_nlp.InverseKinematicsNLP`.
.. figure:: https://user-images.githubusercontent.com/469199/99905096-f29a0f80-2cce-11eb-9f1c-002f6c887bc6.png
:align: center
.. tip::
This example can be the starting point to develop manipulation environments for robot learning.
Visit the :ref:`getting_started_gym_ignition` page for more information about how to wrap this code in the resources provided by ``gym_ignition``.
.. note::
This is an illustrative example. A compliant joint controller is used to move the manipulator; it does not perform any
trajectory planning and it does not avoid self-collisions, which are not enabled in the simulation by default.
There are cube positions where the joint configuration changes a lot with respect to the previous one, generating an abrupt movement
of the manipulator. When these situations occur, grasping might fail.
.. code-block:: python
import enum
import time
from functools import partial
from typing import List
import gym_ignition
import gym_ignition_environments
import numpy as np
from gym_ignition.context.gazebo import controllers
from gym_ignition.rbd import conversions
from gym_ignition.rbd.idyntree import inverse_kinematics_nlp
from scipy.spatial.transform import Rotation as R
from scenario import core as scenario_core
from scenario import gazebo as scenario_gazebo
# Configure verbosity
scenario_gazebo.set_verbosity(scenario_gazebo.Verbosity_error)
# Configure numpy output
np.set_printoptions(precision=4, suppress=True)
def add_panda_controller(
panda: gym_ignition_environments.models.panda.Panda, controller_period: float
) -> None:
# Set the controller period
assert panda.set_controller_period(period=controller_period)
# Increase the max effort of the fingers
panda.get_joint(
joint_name="panda_finger_joint1"
).to_gazebo().set_max_generalized_force(max_force=500.0)
panda.get_joint(
joint_name="panda_finger_joint2"
).to_gazebo().set_max_generalized_force(max_force=500.0)
# Insert the ComputedTorqueFixedBase controller
assert panda.to_gazebo().insert_model_plugin(
*controllers.ComputedTorqueFixedBase(
kp=[100.0] * (panda.dofs() - 2) + [10000.0] * 2,
ki=[0.0] * panda.dofs(),
kd=[17.5] * (panda.dofs() - 2) + [100.0] * 2,
urdf=panda.get_model_file(),
joints=panda.joint_names(),
).args()
)
# Initialize the controller to the current state
assert panda.set_joint_position_targets(panda.joint_positions())
assert panda.set_joint_velocity_targets(panda.joint_velocities())
assert panda.set_joint_acceleration_targets(panda.joint_accelerations())
def get_panda_ik(
panda: gym_ignition_environments.models.panda.Panda, optimized_joints: List[str]
) -> inverse_kinematics_nlp.InverseKinematicsNLP:
# Create IK
ik = inverse_kinematics_nlp.InverseKinematicsNLP(
urdf_filename=panda.get_model_file(),
considered_joints=optimized_joints,
joint_serialization=panda.joint_names(),
)
# Initialize IK
ik.initialize(
verbosity=1,
floating_base=False,
cost_tolerance=1e-8,
constraints_tolerance=1e-8,
base_frame=panda.base_frame(),
)
# Set the current configuration
ik.set_current_robot_configuration(
base_position=np.array(panda.base_position()),
base_quaternion=np.array(panda.base_orientation()),
joint_configuration=np.array(panda.joint_positions()),
)
# Add the cartesian target of the end effector
end_effector = "end_effector_frame"
ik.add_target(
frame_name=end_effector,
target_type=inverse_kinematics_nlp.TargetType.POSE,
as_constraint=False,
)
return ik
def insert_bucket(world: scenario_gazebo.World) -> scenario_gazebo.Model:
# Insert objects from Fuel
uri = lambda org, name: f"https://fuel.ignitionrobotics.org/{org}/models/{name}"
# Download the cube SDF file
bucket_sdf = scenario_gazebo.get_model_file_from_fuel(
uri=uri(
org="GoogleResearch",
name="Threshold_Basket_Natural_Finish_Fabric_Liner_Small",
),
use_cache=False,
)
# Assign a custom name to the model
model_name = "bucket"
# Insert the model
assert world.insert_model(
bucket_sdf, scenario_core.Pose([0.68, 0, 1.02], [1.0, 0, 0, 1]), model_name
)
# Return the model
return world.get_model(model_name=model_name)
def insert_table(world: scenario_gazebo.World) -> scenario_gazebo.Model:
# Insert objects from Fuel
uri = lambda org, name: f"https://fuel.ignitionrobotics.org/{org}/models/{name}"
# Download the cube SDF file
bucket_sdf = scenario_gazebo.get_model_file_from_fuel(
uri=uri(org="OpenRobotics", name="Table"), use_cache=False
)
# Assign a custom name to the model
model_name = "table"
# Insert the model
assert world.insert_model(bucket_sdf, scenario_core.Pose_identity(), model_name)
# Return the model
return world.get_model(model_name=model_name)
def insert_cube_in_operating_area(
world: scenario_gazebo.World,
) -> scenario_gazebo.Model:
# Insert objects from Fuel
uri = lambda org, name: f"https://fuel.ignitionrobotics.org/{org}/models/{name}"
# Download the cube SDF file
cube_sdf = scenario_gazebo.get_model_file_from_fuel(
uri=uri(org="openrobotics", name="wood cube 5cm"), use_cache=False
)
# Sample a random position
random_position = np.random.uniform(low=[0.2, -0.3, 1.01], high=[0.4, 0.3, 1.01])
# Get a unique name
model_name = gym_ignition.utils.scenario.get_unique_model_name(
world=world, model_name="cube"
)
# Insert the model
assert world.insert_model(
cube_sdf, scenario_core.Pose(random_position, [1.0, 0, 0, 0]), model_name
)
# Return the model
return world.get_model(model_name=model_name)
def solve_ik(
target_position: np.ndarray,
target_orientation: np.ndarray,
ik: inverse_kinematics_nlp.InverseKinematicsNLP,
) -> np.ndarray:
quat_xyzw = R.from_euler(seq="y", angles=90, degrees=True).as_quat()
ik.update_transform_target(
target_name=ik.get_active_target_names()[0],
position=target_position,
quaternion=conversions.Quaternion.to_wxyz(xyzw=quat_xyzw),
)
# Run the IK
ik.solve()
return ik.get_reduced_solution().joint_configuration
def end_effector_reached(
position: np.array,
end_effector_link: scenario_core.Link,
max_error_pos: float = 0.01,
max_error_vel: float = 0.5,
mask: np.ndarray = np.array([1.0, 1.0, 1.0]),
) -> bool:
masked_target = mask * position
masked_current = mask * np.array(end_effector_link.position())
return (
np.linalg.norm(masked_current - masked_target) < max_error_pos
and np.linalg.norm(end_effector_link.world_linear_velocity()) < max_error_vel
)
def get_unload_position(bucket: scenario_core.Model) -> np.ndarray:
return bucket.base_position() + np.array([0, 0, 0.3])
class FingersAction(enum.Enum):
OPEN = enum.auto()
CLOSE = enum.auto()
def move_fingers(
panda: gym_ignition_environments.models.panda.Panda, action: FingersAction
) -> None:
# Get the joints of the fingers
finger1 = panda.get_joint(joint_name="panda_finger_joint1")
finger2 = panda.get_joint(joint_name="panda_finger_joint2")
if action is FingersAction.OPEN:
finger1.set_position_target(position=finger1.position_limit().max)
finger2.set_position_target(position=finger2.position_limit().max)
if action is FingersAction.CLOSE:
finger1.set_position_target(position=finger1.position_limit().min)
finger2.set_position_target(position=finger2.position_limit().min)
# ====================
# INITIALIZE THE WORLD
# ====================
# Get the simulator and the world
gazebo, world = gym_ignition.utils.scenario.init_gazebo_sim(
step_size=0.001, real_time_factor=2.0, steps_per_run=1
)
# Open the GUI
gazebo.gui()
time.sleep(3)
gazebo.run(paused=True)
# Insert the Panda manipulator
panda = gym_ignition_environments.models.panda.Panda(
world=world, position=[-0.1, 0, 1.0]
)
# Disable joint velocity limits
_ = [j.to_gazebo().set_velocity_limit(1_000) for j in panda.joints()]
# Enable contacts only for the finger links
panda.get_link("panda_leftfinger").to_gazebo().enable_contact_detection(True)
panda.get_link("panda_rightfinger").to_gazebo().enable_contact_detection(True)
# Process model insertion in the simulation
gazebo.run(paused=True)
# Monkey patch the class with finger helpers
panda.open_fingers = partial(move_fingers, panda=panda, action=FingersAction.OPEN)
panda.close_fingers = partial(move_fingers, panda=panda, action=FingersAction.CLOSE)
# Add a custom joint controller to the panda
add_panda_controller(panda=panda, controller_period=gazebo.step_size())
# Populate the world
table = insert_table(world=world)
bucket = insert_bucket(world=world)
gazebo.run(paused=True)
# Create and configure IK for the panda
ik_joints = [
j.name() for j in panda.joints() if j.type is not scenario_core.JointType_fixed
]
ik = get_panda_ik(panda=panda, optimized_joints=ik_joints)
# Get some manipulator links
finger_left = panda.get_link(link_name="panda_leftfinger")
finger_right = panda.get_link(link_name="panda_rightfinger")
end_effector_frame = panda.get_link(link_name="end_effector_frame")
while True:
# Insert a new cube
cube = insert_cube_in_operating_area(world=world)
gazebo.run(paused=True)
# =========================
# PHASE 1: Go over the cube
# =========================
print("Hovering")
# Position over the cube
position_over_cube = np.array(cube.base_position()) + np.array([0, 0, 0.4])
# Get the joint configuration that brings the EE over the cube
over_joint_configuration = solve_ik(
target_position=position_over_cube,
target_orientation=np.array(cube.base_orientation()),
ik=ik,
)
# Set the joint references
assert panda.set_joint_position_targets(over_joint_configuration, ik_joints)
# Open the fingers
panda.open_fingers()
# Run the simulation until the EE reached the desired position
while not end_effector_reached(
position=position_over_cube,
end_effector_link=end_effector_frame,
max_error_pos=0.05,
max_error_vel=0.5,
):
gazebo.run()
# Wait a bit more
[gazebo.run() for _ in range(500)]
# =======================
# PHASE 2: Reach the cube
# =======================
print("Reaching")
# Get the joint configuration that brings the EE to the cube
over_joint_configuration = solve_ik(
target_position=np.array(cube.base_position()) + np.array([0, 0, 0.04]),
target_orientation=np.array(cube.base_orientation()),
ik=ik,
)
# Set the joint references
assert panda.set_joint_position_targets(over_joint_configuration, ik_joints)
panda.open_fingers()
# Run the simulation until the EE reached the desired position
while not end_effector_reached(
position=np.array(cube.base_position()) + np.array([0, 0, 0.04]),
end_effector_link=end_effector_frame,
):
gazebo.run()
# Wait a bit more
[gazebo.run() for _ in range(500)]
# =======================
# PHASE 3: Grasp the cube
# =======================
print("Grasping")
# Close the fingers
panda.close_fingers()
# Detect a grasp by reading the contact wrenches of the finger links
while not (
np.linalg.norm(finger_left.contact_wrench()) >= 50.0
and np.linalg.norm(finger_right.contact_wrench()) >= 50.0
):
gazebo.run()
# =============
# PHASE 4: Lift
# =============
print("Lifting")
# Position over the cube
position_over_cube = np.array(cube.base_position()) + np.array([0, 0, 0.4])
# Get the joint configuration that brings the EE over the cube
over_joint_configuration = solve_ik(
target_position=position_over_cube,
target_orientation=np.array(cube.base_orientation()),
ik=ik,
)
# Set the joint references
assert panda.set_joint_position_targets(over_joint_configuration, ik_joints)
# Run the simulation until the EE reached the desired position
while not end_effector_reached(
position=position_over_cube,
end_effector_link=end_effector_frame,
max_error_pos=0.1,
max_error_vel=0.5,
):
gazebo.run()
# Wait a bit more
[gazebo.run() for _ in range(500)]
# =====================================
# PHASE 5: Place the cube in the bucket
# =====================================
print("Dropping")
# Get the joint configuration that brings the EE over the bucket
unload_joint_configuration = solve_ik(
target_position=get_unload_position(bucket=bucket),
target_orientation=np.array([0, 1.0, 0, 0]),
ik=ik,
)
# Set the joint references
assert panda.set_joint_position_targets(unload_joint_configuration, ik_joints)
# Run the simulation until the EE reached the desired position
while not end_effector_reached(
position=get_unload_position(bucket=bucket)
+ np.random.uniform(low=-0.05, high=0.05, size=3),
end_effector_link=end_effector_frame,
max_error_pos=0.01,
max_error_vel=0.1,
mask=np.array([1, 1, 0]),
):
gazebo.run()
# Open the fingers
panda.open_fingers()
# Wait until both fingers are no longer in contact (with the cube)
while finger_left.in_contact() or finger_right.in_contact():
gazebo.run()
# Wait a bit more
[gazebo.run() for _ in range(500)]
# Remove the cube
world.remove_model(model_name=cube.name())
# It is always a good practice to close the simulator.
# In this case it is not required since above there is an infinite loop.
# gazebo.close()
================================================
FILE: docs/sphinx/getting_started/scenario.rst
================================================
.. _getting_started_scenario:
ScenarIO
========
In this getting started section we show how to use the Gazebo ScenarIO library to simulate a pendulum system.
We will use the models of the ground plane and the pendulum stored in the repository
`gym_ignition_models `_.
The final outcome of this section is shown in the following GIF:
.. figure:: https://user-images.githubusercontent.com/469199/99263551-a9097a80-281f-11eb-98de-714c69385b06.png
:align: center
.. tip::
We fully support `Ignition Fuel `_, a constantly enlarging database of SDF models.
You can use :py:meth:`~scenario.bindings.gazebo.get_model_file_from_fuel` to download any model of the database:
.. code-block:: python
from scenario import gazebo as scenario_gazebo
model_name = "Electrical Box"
model_file = scenario_gazebo.get_model_file_from_fuel(
uri=f"https://fuel.ignitionrobotics.org/openrobotics/models/{model_name}")
.. _getting_started_scenario_python:
Python
******
.. tabs::
.. group-tab:: example.py
.. code-block:: python
import time
import gym_ignition_models
from scenario import gazebo as scenario_gazebo
# Create the simulator
gazebo = scenario_gazebo.GazeboSimulator(step_size=0.001,
rtf=1.0,
steps_per_run=1)
# Initialize the simulator
gazebo.initialize()
# Get the default world and insert the ground plane
world = gazebo.get_world()
world.insert_model(gym_ignition_models.get_model_file("ground_plane"))
# Select the physics engine
world.set_physics_engine(scenario_gazebo.PhysicsEngine_dart)
# Open the GUI
gazebo.gui()
time.sleep(3)
gazebo.run(paused=True)
# Insert a pendulum
world.insert_model(gym_ignition_models.get_model_file("pendulum"))
gazebo.run(paused=True)
time.sleep(3)
# Get the pendulum model
pendulum = world.get_model("pendulum")
# Reset the pole position
pendulum.get_joint("pivot").to_gazebo().reset_position(0.01)
gazebo.run(paused=True)
time.sleep(3)
# Simulate 30 seconds
for _ in range(int(30.0 / gazebo.step_size())):
gazebo.run()
# Close the simulator
time.sleep(5)
gazebo.close()
.. _getting_started_scenario_cpp:
C++
***
.. tabs::
.. group-tab:: example.cpp
.. code-block:: cpp
#include <scenario/gazebo/GazeboSimulator.h>
#include <scenario/gazebo/Joint.h>
#include <scenario/gazebo/Model.h>
#include <scenario/gazebo/World.h>
#include <chrono>
#include <string>
#include <thread>
int main(int argc, char* argv[])
{
// Create the simulator
auto gazebo = scenario::gazebo::GazeboSimulator(
/*stepSize=*/0.001, /*rtf=*/1.0, /*stepsPerRun=*/1);
// Initialize the simulator
gazebo.initialize();
// Get the default world
auto world = gazebo.getWorld();
// Insert the ground plane
const std::string groundPlaneSDF = "ground_plane.sdf";
world->insertModel(groundPlaneSDF);
// Select the physics engine
world->setPhysicsEngine(scenario::gazebo::PhysicsEngine::Dart);
// Open the GUI
gazebo.gui();
std::this_thread::sleep_for(std::chrono::seconds(3));
gazebo.run(/*paused=*/true);
// Insert a pendulum
const std::string pendulumURDF = "pendulum.urdf";
world->insertModel(/*modelFile=*/pendulumURDF);
gazebo.run(/*paused=*/true);
// Get the pendulum
auto pendulum = world->getModel(/*modelName=*/"pendulum");
// Reset the pole position
auto pivot = pendulum->getJoint("pivot");
auto pivotGazebo = std::static_pointer_cast<scenario::gazebo::Joint>(pivot);
pivotGazebo->resetPosition(0.001);
// Simulate 30 seconds
for (size_t i = 0; i < 30.0 / gazebo.stepSize(); ++i) {
gazebo.run();
}
// Close the simulator
std::this_thread::sleep_for(std::chrono::seconds(3));
gazebo.close();
return 0;
}
.. group-tab:: CMakeLists.txt
.. code-block:: cmake
cmake_minimum_required(VERSION 3.16)
project(ExampleWithScenario VERSION 1.0)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
find_package(Scenario COMPONENTS Gazebo REQUIRED)
add_executable(ExampleWithScenario example.cpp)
target_link_libraries(ExampleWithScenario PRIVATE
ScenarioGazebo::ScenarioGazebo
ScenarioGazebo::GazeboSimulator)
.. note::
The environment should be properly configured to find the plugins and the models.
Use ``IGN_GAZEBO_SYSTEM_PLUGIN_PATH`` for the plugins and ``IGN_GAZEBO_RESOURCE_PATH`` for the models and meshes.
================================================
FILE: docs/sphinx/index.html
================================================
Redirecting to master branch
================================================
FILE: docs/sphinx/index.rst
================================================
.. _scenario_and_gym_ignition:
ScenarIO and gym-ignition
=========================
This project targets both *control* and *robot learning* research domains:
- Researchers in robotics and control can simulate their robots with familiar tools like Gazebo and URDF/SDF,
without the need to rely on any middleware.
- Researchers in robot learning can quickly develop new robotic environments that can scale to hundreds of parallel instances.
We provide two related subprojects to each of these categories:
1. **ScenarIO** provides APIs to interface with the robots.
2. **gym-ignition** helps structuring environments compatible with OpenAI Gym,
while minimizing boilerplate code and providing common rigid-body dynamics utilities.
Check the sections :ref:`What is ScenarIO <what_is_scenario>` and
:ref:`What is gym-ignition <what_is_gym_ignition>` for more details,
and visit :ref:`Motivations <motivations>` for an extended overview.
For a quick practical introduction, visit the :ref:`Getting Started ` page.
If you use this project for your research, please check the FAQ about :ref:`how to give credit <faq_citation>`.
.. list-table::
* - |pendulum_swing|
- |panda_grasping|
- |icub_stepping|
.. |icub_stepping| image:: https://user-images.githubusercontent.com/469199/99262746-9e021a80-281e-11eb-9df1-d70134b0801a.png
.. |panda_grasping| image:: https://user-images.githubusercontent.com/469199/99263111-0cdf7380-281f-11eb-9cfe-338b2aae0503.png
.. |pendulum_swing| image:: https://user-images.githubusercontent.com/469199/99262383-321fb200-281e-11eb-89cc-cc31f590daa3.png
.. toctree::
:hidden:
:maxdepth: 1
:caption: What
what/what_is_scenario
what/what_is_gym_ignition
.. toctree::
:hidden:
:maxdepth: 1
:caption: Why
why/motivations
why/why_scenario
why/why_ignition_gazebo
why/why_gym_ignition
.. toctree::
:hidden:
:maxdepth: 1
:caption: Installation (How)
installation/support_policy
installation/stable
installation/nightly
installation/developer
.. toctree::
:hidden:
:maxdepth: 1
:caption: Getting Started
getting_started/scenario
getting_started/manipulation
getting_started/gym-ignition
.. toctree::
:hidden:
:maxdepth: 2
:caption: ScenarIO C++ API:
breathe/core
breathe/gazebo
.. toctree::
:hidden:
:maxdepth: 2
:caption: Python Packages
apidoc/scenario/scenario.bindings
apidoc/gym-ignition/gym_ignition
apidoc/gym-ignition-environments/gym_ignition_environments
.. toctree::
:hidden:
:maxdepth: 2
:caption: Information
info/faq
info/limitations
================================================
FILE: docs/sphinx/info/faq.rst
================================================
FAQ
===
.. _faq_citation:
How to give credit?
-------------------
If you use **ScenarIO** or **gym-ignition** for your research,
please cite the following reference:
.. code-block:: bibtex
:caption: BibTeX entry
@INPROCEEDINGS{ferigo2020gymignition,
title={Gym-Ignition: Reproducible Robotic Simulations for Reinforcement Learning},
author={D. {Ferigo} and S. {Traversaro} and G. {Metta} and D. {Pucci}},
booktitle={2020 IEEE/SICE International Symposium on System Integration (SII)},
year={2020},
pages={885-890},
doi={10.1109/SII46433.2020.9025951}
}
Interaction with Tensorflow
---------------------------
If your Python application imports both ``scenario`` and ``tensorflow``,
you might experience segfaults with no error messages.
Likely the problem is the `protobuf `_ library.
In fact, both Tensorflow and Ignition Gazebo link against protobuf, but while Gazebo uses the
default version of your OS, Tensorflow vendors a more recent version.
If you import ``scenario`` before ``tensorflow``, the system protobuf is loaded, and
Tensorflow will segfault.
The only workaround we found is importing Tensorflow first:
>>> import tensorflow
>>> import scenario.bindings.gazebo
Ogre2 and OpenGL
----------------
On GNU/Linux distributions that ship an old OpenGL version, the GUI could fail to open printing
error like *Unable to create the rendering window*.
The reason is that Ignition Gazebo has `ogre-next `_
(also known as ogre2) as default rendering engine, and it requires OpenGL greater than 3.3.
You can find more details `here `_.
The workaround we recommend is modifying the file ``~/.ignition/gazebo/gui.config`` as follows:
.. code-block:: diff
--- .ignition/gazebo/gui.config 2020-06-04 14:41:33.471804733 +0200
+++ .ignition/gazebo/gui.config 2020-06-04 14:42:47.826475035 +0200
@@ -30,7 +30,7 @@
falsedocked
- ogre2
+ ogrescene0.4 0.4 0.40.8 0.8 0.8
After this modification, world SDF files that do not specifically ask ``ogre2`` will use
``ogre`` as default rendering engine which works also with older OpenGL versions.
Default SDF world
-----------------
If not specified differently, the :cpp:class:`~scenario::gazebo::GazeboSimulator`
class uses a default world file that is completely empty, without even the ground plane.
You can load a custom SDF world using :cpp:func:`~scenario::gazebo::GazeboSimulator::insertWorldFromSDF()` and then
extracting it from the simulator with :cpp:func:`~scenario::gazebo::GazeboSimulator::getWorld()` using its name.
.. code-block:: xml
:caption: ``empty.world``: use this file as starting point for your custom world
true0 0 10 0 0 00.8 0.8 0.8 10.2 0.2 0.2 110000.90.010.001-0.5 0.1 -0.9
.. note::
If you don't specify any GUI configuration, the default ``~/.ignition/gazebo/gui.config`` is used.
This is the preferred approach since it's easier to maintain and keep world files updated.
You can find more information in the upstream `default.sdf `_.
.. tip::
You don't need to add the physics plugin in the world file. You can use
:cpp:func:`scenario::gazebo::World::setPhysicsEngine()` from your code.
You can also load other plugins during runtime using
:cpp:func:`scenario::gazebo::World::insertWorldPlugin()` and
:cpp:func:`scenario::gazebo::Model::insertModelPlugin()`.
================================================
FILE: docs/sphinx/info/limitations.rst
================================================
Limitations
===========
Sensors support
---------------
Ignition Gazebo supports a wide variety of `sensors `_,
like cameras, lidars, ...
However, ScenarI/O and consequently gym-ignition are not yet able to extract data from sensors.
Follow :issue:`199` if you're interested in sensors support.
Performance
-----------
When operating on a single model, DART provides good enough performance and accuracy for most of the use-cases.
However, we noticed that it does not perform well when many models are inserted in the simulated world,
especially in contact-rich scenarios.
If your aim is performing simulations for robot learning, we recommend running multiple instances of the simulator,
each of them operating on a world containing a single robot.
If your aim is simulating a big world where the controlled model can move inside the scene, exploiting a new feature
of Ignition Gazebo called `levels `_
could be the proper solution. This feature is exploited by the big worlds of `subt `_.
Instead, if your world has many models and the usage of levels does not apply to your use-case, you can try to switch
the physics backend to an engine that handles better your simulation.
.. note::
At the time of writing only DART is officially supported.
There is some recent activity to implement the bullet physics engine, but this back-end is not yet ready.
As last resort, you can implement a new physics backend following the
`instructions `_.
================================================
FILE: docs/sphinx/installation/developer.rst
================================================
.. _installation_developer:
Developer Installation
======================
This installation type is intended for those that want to contribute to the development of the project.
Compared to :ref:`Stable ` and :ref:`Nightly `,
this installation type provides a simplified setup for development, VCS integration, and debugging.
Depending on whether you want to target the ``Stable`` or ``Nightly`` channels,
you have to clone respectively the ``master`` or ``devel`` branch.
Check our :ref:`support policy ` to select the right distribution of Ignition to install.
Dependencies
************
The developer installation requires finding in the system other dependencies not required by the other installation types.
In those cases, we rely on setuptools to download, install, and find all the necessary third-party dependencies.
1. **iDynTree**: ``gym_ignition`` provides helper classes to manipulate the kinematics and the dynamics of rigid-bodies.
Among the many existing solutions, we selected the algorithms implemented in `iDynTree `_.
Follow the `official installation instructions `__ and make sure
that you also enable and install the `Python bindings `__.
You can verify that the installation succeeded and your system is properly configured if you can ``import idyntree.bindings`` in a Python interpreter.
C++ Project
***********
From the root of the repository and from the branch you are interested, you can configure, compile, and install
the CMake project as follows:
.. code-block:: bash
cd scenario/
cmake -S . -B build
cmake --build build/
cmake --build build/ --target install
.. note::
The default install prefix of the CMake project is ``/usr/local``.
If you want to use a different folder, pass ``-DCMAKE_INSTALL_PREFIX=/new/install/prefix`` to the first ``cmake`` command.
.. attention::
The SWIG bindings are installed in the `site-packages `_
folder of the active Python interpreter.
If you have an active virtual environment, it will be automatically detected.
We rely on CMake's logic for detecting Python,
visit `FindPython3 `_ for more details.
.. include:: virtual_environment.rst
Editable Python Installation
****************************
Install the Python package in `editable mode `_.
From the root of the repository:
.. code-block:: bash
pip install -e scenario/
pip install -e .
The editable installation only symlinks the resources of the repository into the active Python installation.
It allows to develop directly operating on the files of the repository and use the updated package without requiring
to install it again.
.. note::
The ``scenario`` editable installation is just a placeholder.
It is necessary to prevent the editable installation of ``gym-ignition`` to override the resources installed by
the manual CMake execution.
Otherwise, the ``scenario`` package from PyPI would be pulled, resulting with a wrong version.
.. include:: system_configuration.rst
================================================
FILE: docs/sphinx/installation/nightly.rst
================================================
.. _installation_nightly:
Nightly
=======
The nightly channel contains the most recent updates of the project.
As described in the :ref:`support policy `, this channel requires building Ignition from sources.
We publish updated nightly packages after any pull request merged in the ``devel`` branch.
.. include:: virtual_environment.rst
PyPI Package
************
We provide two different packages for ScenarIO and gym-ignition.
If you are interested in the ScenarIO package,
install the `scenario `_ package from PyPI:
.. code-block:: bash
pip install --pre scenario
Instead, if you are interested in gym-ignition,
install the `gym-ignition `_ package from PyPI:
.. code-block:: bash
pip install --pre scenario gym-ignition
Note that in this case, specifying also the ``scenario`` dependency is necessary,
otherwise ``pip`` will pull the stable package from PyPI.
.. include:: system_configuration.rst
================================================
FILE: docs/sphinx/installation/stable.rst
================================================
.. _installation_stable:
Stable
======
The stable channel is the easiest way to setup your system.
As described in the :ref:`support policy `, this channel allows installing Ignition from binary packages.
We publish updated stable packages after any tagged release of the ``master`` branch.
.. include:: virtual_environment.rst
PyPI Package
************
We provide two different packages for ScenarIO and gym-ignition.
If you are interested in the ScenarIO package,
install the `scenario `_ package from PyPI:
.. code-block:: bash
pip install scenario
Instead, if you are interested in gym-ignition,
install the `gym-ignition `_ package from PyPI:
.. code-block:: bash
pip install gym-ignition
It will download and install also ``scenario`` since it depends on it.
================================================
FILE: docs/sphinx/installation/support_policy.rst
================================================
.. _support_policy:
Support policy
==============
**gym-ignition** is a hybrid C++ and Python project and it requires finding in the system updated compile and runtime
dependencies, depending on the installation type you select.
The project mostly supports all the major operating systems.
However, we are currently using and testing only GNU/Linux systems.
We do not yet provide official support to other operating systems.
The table below recaps the project requirements of the :ref:`Stable ` and :ref:`Nightly ` channels:
+-------------+-----------------+--------+----------------------+----------+------------+---------+
| Channel | C++ | Python | Ignition | Ubuntu | macOS [*]_ | Windows |
+=============+=================+========+======================+==========+============+=========+
| **Stable** | >= gcc8, clang6 | >= 3.8 | `Fortress`_ (binary) | >= 20.04 | No | No |
+-------------+-----------------+--------+----------------------+----------+------------+---------+
| **Nightly** | >= gcc8, clang6 | >= 3.8 | `Fortress`_ (source) | >= 20.04 | No | No |
+-------------+-----------------+--------+----------------------+----------+------------+---------+
.. _`Fortress`: https://ignitionrobotics.org/docs/fortress/install
.. [*] Ignition officially supports macOS and also ``gym-ignition`` could be installed on this platform.
However, we do not currently test this configuration and we cannot guarantee support.
.. important::
Our policy is to support Ubuntu from the most recent LTS distribution, currently Ubuntu 20.04 Focal.
We typically switch to a new LTS when the first minor release ``YY.MM.1`` is released.
The Python and compilers policies follow a similar approach, we try to keep them updated as much as
possible following what the supported LTS distribution includes.
.. note::
External contributions to extend the support and provide feedback about other platforms are most welcome.
.. admonition:: Fun fact
In the same spirit of `ubuntu/+bug/1 `_, we have our own :issue:`1`.
================================================
FILE: docs/sphinx/installation/system_configuration.rst
================================================
System Configuration
********************
This section applies only to the installations that require building Ignition from sources.
If you installed Ignition from sources, you likely used ``colcon`` and therefore the entire suite was installed in a custom folder called workspace.
The workspace contains all the shared libraries and executables of Ignition, including the plugins loaded during runtime.
Since we cannot know in advance where you created the workspace, ``gym-ignition`` is not able to find the physics plugins.
After you enabled the workspace by sourcing its bash script, you may need to also export the following variable:
.. code-block:: bash
export IGN_GAZEBO_PHYSICS_ENGINE_PATH=${IGN_GAZEBO_PHYSICS_ENGINE_PATH}:${COLCON_PREFIX_PATH}/lib/ign-physics-3/engine-plugins/
Make sure to select the folder corresponding to the correct version of ign-physics, otherwise the wrong plugins are found.
================================================
FILE: docs/sphinx/installation/virtual_environment.rst
================================================
Virtual Environment (optional)
******************************
This step is optional but highly recommended.
Visit the `virtual environments `_ documentation for more details.
.. code-block:: bash
sudo apt install virtualenv
virtualenv -p python3.8 $HOME/venv
source $HOME/venv/bin/activate
Note that the activation is temporary and it is valid only in the same terminal.
Make sure to execute the next steps in a terminal where the virtual environment is active.
================================================
FILE: docs/sphinx/what/what_is_gym_ignition.rst
================================================
.. _what_is_gym_ignition:
What is gym-ignition?
=====================
**gym-ignition** is a framework to create **reproducible robotics environments** for reinforcement learning research.
It is based on the :ref:`ScenarIO ` project which provides the low-level APIs to interface with the Ignition Gazebo simulator.
By default, RL environments share a lot of boilerplate code, e.g. for initializing the simulator or structuring the classes
to expose the ``gym.Env`` interface.
Gym-ignition provides the :py:class:`~gym_ignition.base.task.Task` and :py:class:`~gym_ignition.base.runtime.Runtime`
abstractions that help you focusing on the development of the decision-making logic rather than engineering.
It includes :py:mod:`~gym_ignition.randomizers` to simplify the implementation of domain randomization
of models, physics, and tasks.
Gym-ignition also provides powerful dynamics algorithms compatible with both fixed-base and floating-base robots by
exploiting `iDynTree `_ and exposing
high-level functionalities (:py:mod:`~gym_ignition.rbd.idyntree`).
Gym-ignition does not provide out-of-the-box environments ready to be used.
Rather, its aim is simplifying and streamlining their development.
Nonetheless, for illustrative purpose, it includes canonical examples in the
:py:mod:`gym_ignition_environments` package.
================================================
FILE: docs/sphinx/what/what_is_scenario.rst
================================================
.. _what_is_scenario:
What is ScenarIO
================
**ScenarIO** is a C++ abstraction layer to interact with simulated and real robots.
It mainly provides the following
`C++ interfaces `_:
- :cpp:class:`scenario::core::World`
- :cpp:class:`scenario::core::Model`
- :cpp:class:`scenario::core::Link`
- :cpp:class:`scenario::core::Joint`
These interfaces can be implemented to operate on different scenarios,
including robots operating on either simulated worlds or in real-time.
ScenarIO currently fully implements **Gazebo ScenarIO** (APIs),
a simulated back-end that interacts with `Ignition Gazebo `_.
The result allows stepping the simulator programmatically, ensuring a fully reproducible behaviour.
It relates closely to other projects like
`pybullet `_ and `mujoco-py `_.
A real-time backend that interacts with the `YARP `_ middleware is under development.
ScenarIO can be used either from C++ (:ref:`Core APIs `, :ref:`Gazebo APIs `)
or from Python (:py:mod:`~scenario.bindings.core`, :py:mod:`~scenario.bindings.gazebo`).
If you're interested to know the reasons why we started developing ScenarIO and why we selected Ignition Gazebo
for our simulations, visit the :ref:`Motivations ` section.
================================================
FILE: docs/sphinx/why/motivations.rst
================================================
.. _motivations:
Intro
=====
In this section we recap the motivations behind this project.
Choosing the right framework for your research is often challenging and we hope to provide a broader look that could
help deciding whether it meets your needs or not.
The development of the framework is evolving quickly, and the architecture and motivations described in the
:ref:`reference publication ` are becoming outdated.
While that publication remains the reference in case you use the project for your research,
this section provides a constantly updated description aligned with the most recent development news.
================================================
FILE: docs/sphinx/why/why_gym_ignition.rst
================================================
.. _why_gym_ignition:
Why gym-ignition
================
In the previous sections we described why we developed ScenarIO and why we used Ignition Gazebo to implement its simulation back-end.
While we mentioned few advantages for the robot learning domain, ScenarIO remains a general C++ library that can be used for generic robotic applications.
The reinforcement learning community, in the past years, converged towards Python for the development of the environments
containing the decision-making logic.
`OpenAI Gym `_ became the reference interface to provide a clear separation between agents and environments.
A widespread interface is powerful because if you implement an environment that exposes the ``gym.Env`` interface, you can then use
all the countless frameworks provided by the community to train a policy selecting your favourite algorithm.
The Python package ``gym_ignition`` enables you to create robotic environments compatible with OpenAI Gym.
Being based on ScenarIO, it enables to develop environments that can run not only on different physics engines,
but also on real robots.
You can think of ``gym_ignition`` as a way to help you structuring your environment.
If you know how `pytorch-lightning `_ relates to PyTorch,
the same applies to the interaction between gym-ignition and ScenarIO.
Thanks to the :py:class:`~gym_ignition.base.task.Task` and :py:class:`~gym_ignition.base.runtime.Runtime` interfaces,
``gym_ignition`` abstracts away all the unnecessary boilerplate that otherwise you have to copy and paste between environments.
For example, :py:class:`~gym_ignition.runtimes.gazebo_runtime.GazeboRuntime` provides all boilerplate code to take
your implementation of a :py:class:`~gym_ignition.base.task.Task` and simulate it with Ignition Gazebo.
Furthermore, we provide useful classes with functionalities that are commonly required by robotic environments, like
Inverse Kinematic (:py:class:`~gym_ignition.rbd.idyntree.inverse_kinematics_nlp.InverseKinematicsNLP`)
and multibody dynamics algorithms (:py:class:`~gym_ignition.rbd.idyntree.kindyncomputations.KinDynComputations`)
with full support of floating-base systems.
.. note::
Developing environments for robot learning in C++ is a valid choice and the community has shown different examples.
ScenarIO can be used to develop C++ environments as well, however we find more useful using Python since it allows
a faster prototyping.
.. note::
To the best of our knowledge, the first package that implemented a structure that abstracts the task, the robot, and
the simulator is `openai_ros `_.
We have been inspired by its structure in the early stage of development, and the current interfaces implemented in
gym-ignition are an evolution of the original architecture.
================================================
FILE: docs/sphinx/why/why_ignition_gazebo.rst
================================================
.. _why_ignition_gazebo:
Why Ignition Gazebo
===================
In this section we want to go a bit deeper in the motivations that led us to select Ignition Gazebo as target solution for the simulation back-end of ScenarIO.
To begin, a bit of history. The `Gazebo `_ simulator that the entire robotic community has used for
over a decade is different than Ignition Gazebo.
We will refer to the old simulator as Gazebo Classic.
Ignition Gazebo is the new generation of the simulator.
It takes all the lessons learned from the development of Gazebo Classic and provides a more modular and extensible framework for robotic simulations.
The monolithic structure of Gazebo Classic has been broken in standalone libraries, obtaining a *suite* called `Ignition `_.
Ignition Gazebo is just one of the libraries [*]_ that compose the suite.
When we started the development of gym-ignition, Ignition Gazebo was "stable" enough to start using it.
The clear advantage is support: Gazebo Classic will just receive bug fixing, all the development effort now shifted towards the new Ignition Gazebo.
The price to pay, instead, is that Ignition Gazebo has not yet reached feature parity with Gazebo Classic, even though
the gap is getting filled quickly.
.. [*] Yes, you read correctly: *library*. Ignition Gazebo is a library.
A ``ignition::gazebo::Server`` object can be instantiated and it can be stepped programmatically from your code.
This type of architecture became quite popular recently because it gives you full control of the simulation.
Ignition Gazebo, therefore, became a solution similar to the well known alternatives like ``pybullet`` and ``mujoco-py``.
We have been and currently are Gazebo Classic users, as many other robotics research labs.
Over time, we became familiar with the tools and methods of Gazebo Classic and built a lot of code and applications that depend on it.
Unfortunately, despite some attempts shown by the community, Gazebo Classic is not suitable for the large-scale simulations that are
typical in modern reinforcement learning architectures.
Ignition Gazebo offered us a viable solution for this use case that allows us to exploit the know-how we gained with Gazebo Classic.
The two main features that drove us towards the adoption of Ignition Gazebo are the following:
1. **Physics engines are loaded as plugin libraries and Ignition Gazebo operates on an API-stable interface.**
This architecture allows everyone to implement a new physics engine backend and run simulations exploiting all the other
components of the Ignition suite (rendering, systems, ...).
While today only `DART `_ is officially supported, we believe this is one of the best
attempts to obtain in the long run a framework that allows to switch physics engines with minimal effort.
For reinforcement learning research, it could bring domain randomization to the next level.
2. **Simulations can be stepped programmatically without relying on network transport, guaranteeing full reproducibility.**
Reproducible simulations are paramount whether you are prototyping a new robot controller or you are running
large-scale simulations for robot learning.
Most of the client-server architectures cannot guarantee reproducibility since asynchronous network transports could
provide different results depending on the load of your system.
An effective solution is using the simulator as a library and stepping it programmatically from your code.
Gazebo ScenarIO provides APIs to perform these kind of simulations with Ignition Gazebo.
There are a bunch of other nice features we didn't cover in this section.
Not all of them are currently exposed to ScenarIO Gazebo, please open a feature request if you have any suggestion or,
even better, fire up a pull request!
To summarize, these are the features that motivated us to choose Ignition Gazebo:
- Simulator developed for robotics
- Simulator-as-a-library structure
- Abstraction of different physics engines and rendering engines
- Modular software architecture
- Powerful and constantly improving SDF model description
- Well maintained, packaged, and widely tested
- Large database of objects to create worlds: `Ignition Fuel `_
- Long term vision and support
.. note::
Ignition Gazebo is the target simulator of the new `DARPA Subterranean Challenge `_.
Have a look to their simulation results to understand what you can expect from using Ignition Gazebo.
================================================
FILE: docs/sphinx/why/why_scenario.rst
================================================
.. _why_scenario:
Why ScenarIO
============
*SCENe interfAces for Robot Input/Output* is an abstraction layer to interface with robots.
It exposes APIs to interact with a scene, providing a :cpp:class:`~scenario::core::World` that can return
:cpp:class:`~scenario::core::Model` objects, from which you can gather measurements and send commands.
The relevant APIs of ScenarIO are documented in the :ref:`Scenario Core ` section.
Many simulators already provide an abstraction of different physics engines.
They expose a unified interface that, after selecting the desired back-end, maps its unified methods to those of the
underlying physics engine. The aim of ScenarIO is extending this idea also to real-time robots.
The simulation backend of ScenarIO communicates with a simulator that itself abstracts physics engines.
This is powerful because, in this way, ScenarIO is independent from the details of the underlying physics engine.
Any current and future physics engine supported by the simulator is compatible with ScenarIO without requiring any
change from our side.
Regarding real-time robots, the APIs of ScenarIO can be implemented exploiting middlewares like ROS or YARP.
At the time of writing there are no official real-time backends, stay tuned for further updates.
Once both the simulation and real-time backends are in place, you can then write code to control your robot just once,
it will interact either with the simulated or the real robot depending on the ScenarIO backend you enabled.
.. note::
The ScenarIO interface is flexible and generic.
Let's say you already have built your functionality with the backends we provide,
and you are not happy with the performance of the simulator we used.
You can implement your own simulation backend and run it alongside those we provide.
The same applies to the real-time backend, in case your robot uses a custom middleware or SDK.
.. tip::
So far, we always referred to the C++ abstraction layer provided by ScenarIO.
The interface / implementation pattern is implemented with classic inheritance and polymorphism.
Having such unified interface simplifies the process to expose it to other languages.
Thanks to SWIG, we officially provide Python bindings of ScenarIO,
so that you can prototype your applications even faster!
================================================
FILE: examples/panda_pick_and_place.py
================================================
import enum
import time
from functools import partial
from typing import List
import gym_ignition
import gym_ignition_environments
import numpy as np
from gym_ignition.context.gazebo import controllers
from gym_ignition.rbd import conversions
from gym_ignition.rbd.idyntree import inverse_kinematics_nlp
from scipy.spatial.transform import Rotation as R
from scenario import core as scenario_core
from scenario import gazebo as scenario_gazebo
# Configure verbosity: only print error messages from the simulator
scenario_gazebo.set_verbosity(scenario_gazebo.Verbosity_error)
# Configure numpy output: 4 decimal digits, suppress scientific notation
np.set_printoptions(precision=4, suppress=True)
def add_panda_controller(
    panda: gym_ignition_environments.models.panda.Panda, controller_period: float
) -> None:
    """Attach a computed-torque controller to the Panda and sync it to the
    robot's current state.

    Args:
        panda: The Panda model already inserted in the world.
        controller_period: Update period of the controller in seconds.
    """

    # Apply the requested controller period
    assert panda.set_controller_period(period=controller_period)

    # Raise the effort limit of both fingers so they can grasp firmly
    for finger_joint in ("panda_finger_joint1", "panda_finger_joint2"):
        panda.get_joint(joint_name=finger_joint).to_gazebo().set_max_generalized_force(
            max_force=500.0
        )

    # Build the gains: the arm joints get the nominal values, while the two
    # fingers (the last two DoFs) get stiffer proportional and damping gains
    nr_arm_dofs = panda.dofs() - 2
    proportional_gains = [100.0] * nr_arm_dofs + [10000.0] * 2
    integral_gains = [0.0] * panda.dofs()
    derivative_gains = [17.5] * nr_arm_dofs + [100.0] * 2

    # Load the ComputedTorqueFixedBase controller plugin into the model
    controller = controllers.ComputedTorqueFixedBase(
        kp=proportional_gains,
        ki=integral_gains,
        kd=derivative_gains,
        urdf=panda.get_model_file(),
        joints=panda.joint_names(),
    )
    assert panda.to_gazebo().insert_model_plugin(*controller.args())

    # Bootstrap the controller references with the current robot state
    assert panda.set_joint_position_targets(panda.joint_positions())
    assert panda.set_joint_velocity_targets(panda.joint_velocities())
    assert panda.set_joint_acceleration_targets(panda.joint_accelerations())
def get_panda_ik(
    panda: gym_ignition_environments.models.panda.Panda, optimized_joints: List[str]
) -> inverse_kinematics_nlp.InverseKinematicsNLP:
    """
    Build and configure an inverse kinematics solver for the Panda.

    Args:
        panda: The Panda model.
        optimized_joints: The names of the joints considered by the IK.

    Returns:
        The configured inverse kinematics object.
    """

    # Build the IK problem from the model description
    solver = inverse_kinematics_nlp.InverseKinematicsNLP(
        urdf_filename=panda.get_model_file(),
        considered_joints=optimized_joints,
        joint_serialization=panda.joint_names(),
    )

    # Configure the underlying NLP
    solver.initialize(
        verbosity=1,
        floating_base=False,
        cost_tolerance=1e-8,
        constraints_tolerance=1e-8,
        base_frame=panda.base_frame(),
    )

    # Seed the solver with the robot's current configuration
    solver.set_current_robot_configuration(
        base_position=np.array(panda.base_position()),
        base_quaternion=np.array(panda.base_orientation()),
        joint_configuration=np.array(panda.joint_positions()),
    )

    # Track the full pose of the end-effector frame (as a cost, not a constraint)
    solver.add_target(
        frame_name="end_effector_frame",
        target_type=inverse_kinematics_nlp.TargetType.POSE,
        as_constraint=False,
    )

    return solver
def insert_bucket(world: scenario_gazebo.World) -> scenario_gazebo.Model:
    """
    Download a basket model from the Fuel database and insert it in the world.

    Args:
        world: The world where the model is inserted.

    Returns:
        The inserted bucket model.
    """

    def fuel_uri(org: str, name: str) -> str:
        return f"https://fuel.ignitionrobotics.org/{org}/models/{name}"

    # Download the bucket SDF file
    bucket_sdf = scenario_gazebo.get_model_file_from_fuel(
        uri=fuel_uri(
            org="GoogleResearch",
            name="Threshold_Basket_Natural_Finish_Fabric_Liner_Small",
        ),
        use_cache=False,
    )

    # Insert the model with a custom name and an initial pose
    model_name = "bucket"
    assert world.insert_model(
        bucket_sdf, scenario_core.Pose([0.68, 0, 1.02], [1.0, 0, 0, 1]), model_name
    )

    # Return the model
    return world.get_model(model_name=model_name)
def insert_table(world: scenario_gazebo.World) -> scenario_gazebo.Model:
    """
    Download a table model from the Fuel database and insert it in the world.

    Args:
        world: The world where the model is inserted.

    Returns:
        The inserted table model.
    """

    def fuel_uri(org: str, name: str) -> str:
        return f"https://fuel.ignitionrobotics.org/{org}/models/{name}"

    # Download the table SDF file
    table_sdf = scenario_gazebo.get_model_file_from_fuel(
        uri=fuel_uri(org="OpenRobotics", name="Table"), use_cache=False
    )

    # Insert the model with a custom name at the origin
    model_name = "table"
    assert world.insert_model(table_sdf, scenario_core.Pose_identity(), model_name)

    # Return the model
    return world.get_model(model_name=model_name)
def insert_cube_in_operating_area(
    world: scenario_gazebo.World,
) -> scenario_gazebo.Model:
    """
    Insert a 5 cm wood cube in a random position of the operating area.

    Args:
        world: The world where the cube is inserted.

    Returns:
        The inserted cube model.
    """

    def fuel_uri(org: str, name: str) -> str:
        return f"https://fuel.ignitionrobotics.org/{org}/models/{name}"

    # Download the cube SDF file
    cube_sdf = scenario_gazebo.get_model_file_from_fuel(
        uri=fuel_uri(org="openrobotics", name="wood cube 5cm"), use_cache=False
    )

    # Sample a random position (the z coordinate is fixed)
    random_position = np.random.uniform(low=[0.2, -0.3, 1.01], high=[0.4, 0.3, 1.01])

    # Compute a model name not yet used in the world
    model_name = gym_ignition.utils.scenario.get_unique_model_name(
        world=world, model_name="cube"
    )

    # Insert the model
    assert world.insert_model(
        cube_sdf, scenario_core.Pose(random_position, [1.0, 0, 0, 0]), model_name
    )

    # Return the model
    return world.get_model(model_name=model_name)
def solve_ik(
    target_position: np.ndarray,
    target_orientation: np.ndarray,
    ik: inverse_kinematics_nlp.InverseKinematicsNLP,
) -> np.ndarray:
    """
    Solve the inverse kinematics for a given end-effector target.

    Note:
        The ``target_orientation`` argument is currently ignored by the body:
        the orientation target is always a fixed 90 degrees rotation around
        the y axis (top-down end effector).

    Args:
        target_position: The desired Cartesian position of the end effector.
        target_orientation: Unused (see the note in this docstring).
        ik: The inverse kinematics solver.

    Returns:
        The configuration of the optimized joints.
    """

    # Fixed top-down orientation of the end effector (scipy returns xyzw)
    quat_xyzw = R.from_euler(seq="y", angles=90, degrees=True).as_quat()

    # Update the active pose target
    ik.update_transform_target(
        target_name=ik.get_active_target_names()[0],
        position=target_position,
        quaternion=conversions.Quaternion.to_wxyz(xyzw=quat_xyzw),
    )

    # Run the optimization
    ik.solve()

    # Return only the configuration of the considered joints
    return ik.get_reduced_solution().joint_configuration
def end_effector_reached(
    position: np.ndarray,
    end_effector_link: "scenario_core.Link",
    max_error_pos: float = 0.01,
    max_error_vel: float = 0.5,
    mask: Optional[np.ndarray] = None,
) -> bool:
    """
    Check whether the end effector reached a target position and settled.

    Args:
        position: The target Cartesian position.
        end_effector_link: The link used as end effector.
        max_error_pos: Maximum allowed position error on the masked axes.
        max_error_vel: Maximum allowed norm of the link linear velocity.
        mask: Optional per-axis weight applied to both the target and the
            current position. Defaults to all three axes enabled.

    Returns:
        True if the masked position error and the linear velocity are both
        below their thresholds, False otherwise.
    """

    # The original default was a module-level np.array, evaluated once at
    # definition time and shared by every call. Creating it lazily avoids
    # the shared mutable default argument pitfall.
    mask = np.array([1.0, 1.0, 1.0]) if mask is None else mask

    masked_target = mask * position
    masked_current = mask * np.array(end_effector_link.position())

    return (
        np.linalg.norm(masked_current - masked_target) < max_error_pos
        and np.linalg.norm(end_effector_link.world_linear_velocity()) < max_error_vel
    )
def get_unload_position(bucket: scenario_core.Model) -> np.ndarray:
    """Return the drop position located 30 cm above the bucket base."""

    height_offset = np.array([0, 0, 0.3])
    return bucket.base_position() + height_offset
class FingersAction(enum.Enum):
    """Discrete commands for the gripper fingers."""

    # Explicit values matching what enum.auto() would assign
    OPEN = 1
    CLOSE = 2
def move_fingers(
    panda: gym_ignition_environments.models.panda.Panda, action: FingersAction
) -> None:
    """
    Move both gripper fingers either to their upper or lower position limit.

    Args:
        panda: The Panda model.
        action: Whether to open or close the fingers.
    """

    for joint_name in ("panda_finger_joint1", "panda_finger_joint2"):
        finger = panda.get_joint(joint_name=joint_name)

        if action is FingersAction.OPEN:
            finger.set_position_target(position=finger.position_limit().max)
        elif action is FingersAction.CLOSE:
            finger.set_position_target(position=finger.position_limit().min)
# ====================
# INITIALIZE THE WORLD
# ====================

# Get the simulator and the world (1 ms physics step, 2x real-time factor)
gazebo, world = gym_ignition.utils.scenario.init_gazebo_sim(
    step_size=0.001, real_time_factor=2.0, steps_per_run=1
)

# Open the GUI and give it some time to come up
gazebo.gui()
time.sleep(3)
gazebo.run(paused=True)

# Insert the Panda manipulator
panda = gym_ignition_environments.models.panda.Panda(
    world=world, position=[-0.1, 0, 1.0]
)

# Disable joint velocity limits
_ = [j.to_gazebo().set_velocity_limit(1_000) for j in panda.joints()]

# Enable contacts only for the finger links (used later to detect the grasp)
panda.get_link("panda_leftfinger").to_gazebo().enable_contact_detection(True)
panda.get_link("panda_rightfinger").to_gazebo().enable_contact_detection(True)

# Process model insertion in the simulation (paused run)
gazebo.run(paused=True)

# Monkey patch the object with finger helpers
panda.open_fingers = partial(move_fingers, panda=panda, action=FingersAction.OPEN)
panda.close_fingers = partial(move_fingers, panda=panda, action=FingersAction.CLOSE)

# Add a custom joint controller to the panda
add_panda_controller(panda=panda, controller_period=gazebo.step_size())

# Populate the world
table = insert_table(world=world)
bucket = insert_bucket(world=world)
gazebo.run(paused=True)

# Create and configure IK for the panda (only the non-fixed joints)
ik_joints = [
    j.name() for j in panda.joints() if j.type is not scenario_core.JointType_fixed
]
ik = get_panda_ik(panda=panda, optimized_joints=ik_joints)

# Get some manipulator links used in the control loop below
finger_left = panda.get_link(link_name="panda_leftfinger")
finger_right = panda.get_link(link_name="panda_rightfinger")
end_effector_frame = panda.get_link(link_name="end_effector_frame")
while True:

    # Insert a new cube in a random position of the operating area
    cube = insert_cube_in_operating_area(world=world)
    gazebo.run(paused=True)

    # =========================
    # PHASE 1: Go over the cube
    # =========================
    print("Hovering")

    # Position 40 cm over the cube
    position_over_cube = np.array(cube.base_position()) + np.array([0, 0, 0.4])

    # Get the joint configuration that brings the EE over the cube
    over_joint_configuration = solve_ik(
        target_position=position_over_cube,
        target_orientation=np.array(cube.base_orientation()),
        ik=ik,
    )

    # Set the joint references
    assert panda.set_joint_position_targets(over_joint_configuration, ik_joints)

    # Open the fingers
    panda.open_fingers()

    # Run the simulation until the EE reached the desired position
    while not end_effector_reached(
        position=position_over_cube,
        end_effector_link=end_effector_frame,
        max_error_pos=0.05,
        max_error_vel=0.5,
    ):
        gazebo.run()

    # Wait a bit more
    [gazebo.run() for _ in range(500)]

    # =======================
    # PHASE 2: Reach the cube
    # =======================
    print("Reaching")

    # Get the joint configuration that brings the EE close to the cube
    over_joint_configuration = solve_ik(
        target_position=np.array(cube.base_position()) + np.array([0, 0, 0.04]),
        target_orientation=np.array(cube.base_orientation()),
        ik=ik,
    )

    # Set the joint references
    assert panda.set_joint_position_targets(over_joint_configuration, ik_joints)
    panda.open_fingers()

    # Run the simulation until the EE reached the desired position
    while not end_effector_reached(
        position=np.array(cube.base_position()) + np.array([0, 0, 0.04]),
        end_effector_link=end_effector_frame,
    ):
        gazebo.run()

    # Wait a bit more
    [gazebo.run() for _ in range(500)]

    # =======================
    # PHASE 3: Grasp the cube
    # =======================
    print("Grasping")

    # Close the fingers
    panda.close_fingers()

    # Detect a grasp by reading the contact wrenches of the finger links
    while not (
        np.linalg.norm(finger_left.contact_wrench()) >= 50.0
        and np.linalg.norm(finger_right.contact_wrench()) >= 50.0
    ):
        gazebo.run()

    # =============
    # PHASE 4: Lift
    # =============
    print("Lifting")

    # Position over the cube
    position_over_cube = np.array(cube.base_position()) + np.array([0, 0, 0.4])

    # Get the joint configuration that brings the EE over the cube
    over_joint_configuration = solve_ik(
        target_position=position_over_cube,
        target_orientation=np.array(cube.base_orientation()),
        ik=ik,
    )

    # Set the joint references
    assert panda.set_joint_position_targets(over_joint_configuration, ik_joints)

    # Run the simulation until the EE reached the desired position
    while not end_effector_reached(
        position=position_over_cube,
        end_effector_link=end_effector_frame,
        max_error_pos=0.1,
        max_error_vel=0.5,
    ):
        gazebo.run()

    # Wait a bit more
    [gazebo.run() for _ in range(500)]

    # =====================================
    # PHASE 5: Place the cube in the bucket
    # =====================================
    print("Dropping")

    # Get the joint configuration that brings the EE over the bucket
    unload_joint_configuration = solve_ik(
        target_position=get_unload_position(bucket=bucket),
        target_orientation=np.array([0, 1.0, 0, 0]),
        ik=ik,
    )

    # Set the joint references
    assert panda.set_joint_position_targets(unload_joint_configuration, ik_joints)

    # Run the simulation until the EE reached the desired position.
    # The z axis is masked out and a small random offset loosens the target.
    while not end_effector_reached(
        position=get_unload_position(bucket=bucket)
        + np.random.uniform(low=-0.05, high=0.05, size=3),
        end_effector_link=end_effector_frame,
        max_error_pos=0.01,
        max_error_vel=0.1,
        mask=np.array([1, 1, 0]),
    ):
        gazebo.run()

    # Open the fingers
    panda.open_fingers()

    # Wait until both fingers are no longer in contact (with the cube)
    while finger_left.in_contact() or finger_right.in_contact():
        gazebo.run()

    # Wait a bit more
    [gazebo.run() for _ in range(500)]

    # Remove the cube
    world.remove_model(model_name=cube.name())

# It is always a good practice to close the simulator.
# In this case it is not required since above there is an infinite loop.
# gazebo.close()
================================================
FILE: examples/python/launch_cartpole.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import functools
import time
import gym
from gym_ignition.utils import logger
from gym_ignition_environments import randomizers
# Set verbosity
logger.set_level(gym.logger.ERROR)
# logger.set_level(gym.logger.DEBUG)

# Available tasks (uncomment the desired environment)
env_id = "CartPoleDiscreteBalancing-Gazebo-v0"
# env_id = "CartPoleContinuousBalancing-Gazebo-v0"
# env_id = "CartPoleContinuousSwingup-Gazebo-v0"
def make_env_from_id(env_id: str, **kwargs) -> gym.Env:
    """Create an environment from its id, registering the environments first."""

    import gym

    # Importing the package registers the available environments
    import gym_ignition_environments

    env = gym.make(env_id, **kwargs)
    return env
# Create a partial function passing the environment id
make_env = functools.partial(make_env_from_id, env_id=env_id)

# Wrap the environment with the randomizer.
# This is a simple example in which no randomization is applied.
env = randomizers.cartpole_no_rand.CartpoleEnvNoRandomizations(env=make_env)

# Wrap the environment with the randomizer.
# This is a complex example that randomizes both the physics and the model.
# env = randomizers.cartpole.CartpoleEnvRandomizer(
#     env=make_env, seed=42, num_physics_rollouts=5)

# Enable the rendering
# env.render('human')

# Initialize the seed
env.seed(42)

for epoch in range(10):

    # Reset the environment
    observation = env.reset()

    # Initialize returned values
    done = False
    totalReward = 0

    while not done:

        # Execute a random action
        action = env.action_space.sample()
        observation, reward, done, _ = env.step(action)

        # Render the environment.
        # It is not required to call this in the loop if physics is not randomized.
        # env.render('human')

        # Accumulate the reward
        totalReward += reward

        # Print the observation
        msg = ""
        for value in observation:
            msg += "\t%.6f" % value
        logger.debug(msg)

    print(f"Reward episode #{epoch}: {totalReward}")

env.close()
time.sleep(5)
================================================
FILE: pyproject.toml
================================================
[build-system]
build-backend = "setuptools.build_meta"
requires = [
    "wheel",
    "setuptools>=45",
    "setuptools_scm[toml]>=6.0",
    "cmake-build-extension",
]

# Derive the package version from git tags
[tool.setuptools_scm]
local_scheme = "dirty-tag"

# Code formatting
[tool.black]
line-length = 88

# Import sorting, configured to be compatible with black
[tool.isort]
profile = "black"
multi_line_output = 3
================================================
FILE: python/gym_ignition/__init__.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.

# Workaround for https://github.com/osrf/sdformat/issues/227.
# It has to be done before loading the bindings.
import gym_ignition_models

gym_ignition_models.setup_environment()

# Add IGN_GAZEBO_RESOURCE_PATH to the default search path
import os

from gym_ignition.utils import resource_finder

if "IGN_GAZEBO_RESOURCE_PATH" in os.environ:
    resource_finder.add_path_from_env_var("IGN_GAZEBO_RESOURCE_PATH")
def initialize_verbosity() -> None:
    """
    Configure the default verbosity of gym-ignition and ScenarIO.

    The verbosity is selected from the detected installation mode:
    developer installations get a more verbose default than user installations.

    Raises:
        ValueError: If the installation mode is not recognized.
    """

    import gym

    import gym_ignition.utils.logger
    import scenario

    # Query the install mode only once instead of once per branch
    install_mode = scenario.detect_install_mode()

    if install_mode is scenario.InstallMode.Developer:
        gym_ignition.utils.logger.set_level(
            level=gym.logger.INFO, scenario_level=gym.logger.WARN
        )
    elif install_mode is scenario.InstallMode.User:
        gym_ignition.utils.logger.set_level(
            level=gym.logger.WARN, scenario_level=gym.logger.WARN
        )
    else:
        raise ValueError(install_mode)
# Configure default verbosity as soon as the package is imported
initialize_verbosity()
================================================
FILE: python/gym_ignition/base/__init__.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# Abstract classes
from . import runtime, task
================================================
FILE: python/gym_ignition/base/runtime.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
import gym
from gym_ignition.base.task import Task
class Runtime(gym.Env, abc.ABC):
    """
    Base class for defining executors of :py:class:`~gym_ignition.base.task.Task` objects.

    :py:class:`~gym_ignition.base.task.Task` classes are supposed to be generic and are
    not tied to any specific runtime. Implementations of a runtime class contain all the
    logic to define how to execute the task, allowing to run the same
    :py:class:`~gym_ignition.base.task.Task` class on different simulators or in a
    real-time setting.

    Runtimes are the real :py:class:`gym.Env` objects returned to the users when they call
    the :py:class:`gym.make` factory method.

    Args:
        task: the :py:class:`~gym_ignition.base.task.Task` object to handle.
        agent_rate: the rate at which the environment will be called. Sometimes tasks need
            to know this information.

    Example:
        Here is minimal example of how the :py:class:`Runtime`, :py:class:`gym.Env` and
        :py:class:`~gym_ignition.base.task.Task` could be integrated:

        .. code-block:: python

            class FooRuntime(Runtime):

                def __init__(self, task, agent_rate):
                    super().__init__(task=task, agent_rate=agent_rate)
                    self.action_space, self.observation_space = self.task.create_spaces()

                def reset(self):
                    self.task.reset_task()
                    return self.task.get_observation()

                def step(self, action):
                    self.task.set_action(action)

                    # [...] code that performs the real step [...]

                    done = self.task.is_done()
                    reward = self.task.get_reward()
                    observation = self.task.get_observation()
                    return observation, reward, done, {}

                def close(self):
                    pass

    Note:
        Runtimes can handle only one :py:class:`~gym_ignition.base.task.Task` object.
    """

    def __init__(self, task: Task, agent_rate: float):

        #: Task handled by the runtime.
        self.task: Task = task

        #: Rate of environment execution.
        self.agent_rate = agent_rate

    @abc.abstractmethod
    def timestamp(self) -> float:
        """
        Return the timestamp associated to the execution of the environment.

        In real-time environments, the timestamp is the time read from the host system.
        In simulated environments, the timestamp is the simulated time, which might not
        match the real-time in the case of a real-time factor different than 1.

        Returns:
            The current environment timestamp.
        """
FILE: python/gym_ignition/base/task.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import Dict, Tuple
import gym
import numpy as np
from gym.utils import seeding
from gym_ignition.utils.typing import (
Action,
ActionSpace,
Observation,
ObservationSpace,
Reward,
SeedList,
)
from scenario import core
class Task(abc.ABC):
    """
    Interface to define a decision-making task.

    The Task is the central interface of each environment implementation.
    It defines the logic of the environment in a format that is agnostic of both the
    runtime (either simulated or real-time) and the models it operates on.

    :py:class:`~gym_ignition.base.runtime.Runtime` instances are the real objects returned
    to the users when they call :py:class:`gym.make`. Depending on the type of the
    runtime, it could contain one or more :py:class:`Task` objects.
    The :py:class:`~gym_ignition.base.runtime.Runtime` is a relay class that calls the
    logic of the :py:class:`Task` from its interface methods and implements the real
    :py:meth:`gym.Env.step`.
    Simulated runtimes step the physics engine, instead, real-time
    runtimes, enforce real-time execution.

    A :py:class:`Task` object is meant to be:

    - Independent from the selected :py:class:`~gym_ignition.base.runtime.Runtime`.
      In fact, it defines only the decision making logic;
    - Independent from the :py:class:`~scenario.core.Model` objects it operates on.
      This is achieved thanks to the model abstraction provided by
      :cpp:class:`scenario::core::Model`.

    The population of the world where the task operates is demanded to a
    :py:class:`gym.Wrapper` object, that acts as an environment randomizer.
    """

    # Spaces of the task, populated by create_spaces()
    action_space: gym.spaces.Space = None
    observation_space: gym.spaces.Space = None

    def __init__(self, agent_rate: float) -> None:

        # World object
        self._world = None

        #: Rate of the agent.
        #: It matches the rate at which the :py:class:`Gym.Env` methods are called.
        self.agent_rate = agent_rate

        #: RNG available to the object to ensure reproducibility.
        #: Use it for all the random resources.
        self.np_random: np.random.RandomState

        #: The seed of the task
        self.seed: int

        # Initialize the RNG and the seed
        self.np_random, self.seed = seeding.np_random()

    # ==========
    # PROPERTIES
    # ==========

    @property
    def world(self) -> core.World:
        """
        Get the world where the task is operating.

        Returns:
            The world object.
        """

        if self._world is not None:
            return self._world

        raise Exception("The world was never stored")

    @world.setter
    def world(self, world: core.World) -> None:

        # NOTE(review): `world.name` looks like it could be a bound method of
        # the ScenarIO bindings rather than an attribute; if so, the comparison
        # with "" never matches — confirm against the bindings.
        if world is None or world.name == "":
            raise ValueError("World not valid")

        # Store the world
        self._world = world

    def has_world(self) -> bool:
        """
        Check if the world was stored.

        Returns:
            True if the task has a valid world, False otherwise.
        """

        return self._world is not None and self._world.name != ""

    # ==============
    # Task Interface
    # ==============

    @abc.abstractmethod
    def create_spaces(self) -> Tuple[ActionSpace, ObservationSpace]:
        """
        Create the action and observations spaces.

        Note:
            This method does not currently have access to the Models part of the
            environment. If the Task is meant to work on different models, we recommend
            using their URDF / SDF model to extract the information you need
            (e.g. number of DoFs, joint position limits, etc). Since actions and
            observations are often normalized, in many cases there's no need to extract
            a lot of information from the model file.

        Raises:
            RuntimeError: In case of failure.

        Returns:
            A tuple containing the action and observation spaces.
        """

    @abc.abstractmethod
    def reset_task(self) -> None:
        """
        Reset the task.

        This method contains the logic for resetting the task.
        It is called in the :py:meth:`gym.Env.reset` method of the corresponding
        environment.

        Raises:
            RuntimeError: In case of failure.
        """

    @abc.abstractmethod
    def set_action(self, action: Action) -> None:
        """
        Set the task action.

        This method contains the logic for setting the environment action.
        It is called in the beginning of the :py:meth:`gym.Env.step` method.

        Args:
            action: The action to set.

        Raises:
            RuntimeError: In case of failure.
        """

    @abc.abstractmethod
    def get_observation(self) -> Observation:
        """
        Return the task observation.

        This method contains the logic for constructing the environment observation.
        It is called in the end of both :py:meth:`gym.Env.reset` and
        :py:meth:`gym.Env.step` methods.

        Raises:
            RuntimeError: In case of failure.

        Returns:
            The task observation.
        """

    @abc.abstractmethod
    def get_reward(self) -> Reward:
        """
        Return the task reward.

        This method contains the logic for computing the environment reward.
        It is called in the end of the :py:meth:`gym.Env.step` method.

        Raises:
            RuntimeError: In case of failure.

        Returns:
            The scalar reward.
        """

    @abc.abstractmethod
    def is_done(self) -> bool:
        """
        Return the task termination flag.

        This method contains the logic for defining when the environment has terminated.
        Subsequent calls to :py:meth:`Task.set_action` should be preceded by a task
        reset through :py:meth:`Task.reset_task`.
        It is called in the end of the :py:meth:`gym.Env.step` method.

        Raises:
            RuntimeError: In case of failure.

        Returns:
            True if the environment terminated, False otherwise.
        """

    def get_info(self) -> Dict:
        """
        Return the info dictionary.

        Returns:
            A ``dict`` with extra information of the task.
        """

        return {}

    def seed_task(self, seed: int = None) -> SeedList:
        """
        Seed the task.

        This method configures the :py:attr:`Task.np_random` RNG.

        Args:
            seed: The seed number.

        Returns:
            The list of seeds used by the task.
        """

        # Create the seed if not passed
        seed = np.random.randint(2 ** 32 - 1) if seed is None else seed

        # Get an instance of the random number generator from gym utils.
        # This is necessary to have an independent rng for each environment.
        self.np_random, self.seed = seeding.np_random(seed)

        # Seed the spaces
        self.action_space.seed(self.seed)
        self.observation_space.seed(self.seed)

        return SeedList([self.seed])
================================================
FILE: python/gym_ignition/context/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import gazebo
================================================
FILE: python/gym_ignition/context/gazebo/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import controllers, plugin
================================================
FILE: python/gym_ignition/context/gazebo/controllers.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from dataclasses import dataclass, field
from typing import Iterable, List, Tuple
from gym_ignition.context.gazebo import plugin
GRAVITY = (0, 0, -9.80665)
@dataclass
class ComputedTorqueFixedBase(plugin.GazeboPlugin):
    """
    Helper class inserting the ControllerRunner plugin configured with a
    computed-torque controller for fixed-base models.

    The public dataclass fields are serialized to the XML context passed
    to the plugin.
    """

    urdf: str
    kp: List[float]
    ki: List[float]
    kd: List[float]
    joints: List[str]
    gravity: Tuple[float, float, float] = field(default_factory=lambda: GRAVITY)

    # Private fields
    _name: str = field(init=False, repr=False, default="ComputedTorqueFixedBase")
    _plugin_name: str = field(init=False, repr=False, default="ControllerRunner")
    _plugin_class: str = field(
        init=False, repr=False, default="scenario::plugins::gazebo::ControllerRunner"
    )

    def to_xml(self) -> str:
        """
        Serialize the controller parameters to the plugin's XML context.

        Returns:
            The XML context of the plugin.
        """

        # The previous version emitted only the bare values with no XML
        # elements, which cannot be parsed as plugin configuration. Each
        # field is wrapped here in an element named after the field.
        xml = f"""
            <kp>{self._to_str(self.kp)}</kp>
            <ki>{self._to_str(self.ki)}</ki>
            <kd>{self._to_str(self.kd)}</kd>
            <urdf>{self.urdf}</urdf>
            <joints>{self._to_str(self.joints)}</joints>
            <gravity>{self._to_str(self.gravity)}</gravity>
            """

        return xml

    @staticmethod
    def _to_str(iterable: Iterable) -> str:
        """Serialize an iterable to a space-separated string."""

        return " ".join([str(el) for el in iterable])
================================================
FILE: python/gym_ignition/context/gazebo/plugin.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from dataclasses import dataclass, field
from typing import Tuple
# Default SDF version used in the serialized XML context
SDF_VERSION = 1.7

# Read the following for more information about dataclasses internals:
# https://florimond.dev/blog/articles/2018/10/reconciling-dataclasses-and-properties-in-python/


@dataclass
class GazeboPlugin(abc.ABC):
    """
    Base class of all World and Model plugins for Ignition Gazebo.

    The Plugin abstract class provides boilerplate code that simplifies and streamlines
    the definition of helper classes that insert Ignition Gazebo plugins to either World
    or Model objects.

    Classes that inherit from Plugin have to provide the following information:

    1) All the properties of the plugin as dataclass fields
    2) The private specification of the plugin (plugin name and plugin class)
    3) Optionally: the serialized XML context

    Example:

    .. code-block:: python

        plugin = MyPlugin(my_property=42)
        model = MyModel(world=my_world)
        model.insert_model_plugin(*plugin.args())
    """

    _plugin_name: str = field(init=False, repr=False)
    _plugin_class: str = field(init=False, repr=False)

    def to_xml(self) -> str:
        """
        Get the XML plugin content.

        Returns:
            The XML plugin content.
        """

        return ""

    def args(self) -> Tuple[str, str, str]:
        """
        Get the arguments passed to the ScenarI/O methods used to insert plugins.

        Returns:
            A tuple with the args required to insert the plugin.
        """

        return (
            str(self._plugin_name),
            str(self._plugin_class),
            GazeboPlugin.wrap_in_sdf(self.to_xml()),
        )

    @staticmethod
    def wrap_in_sdf(context: str) -> str:
        """
        Wrap the XML context inside a SDF root.

        Args:
            context: The XML string with the plugin's context.

        Returns:
            The plugin's context wrapped inside a SDF root.
        """

        # The previous version returned the context unchanged, despite this
        # method's contract (and the otherwise-unused SDF_VERSION constant).
        # Wrap the context in a versioned <sdf> root element.
        return f"""
            <sdf version='{SDF_VERSION}'>
                {context}
            </sdf>
            """
================================================
FILE: python/gym_ignition/randomizers/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import abc, gazebo_env_randomizer, model, physics
================================================
FILE: python/gym_ignition/randomizers/abc.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
import gym_ignition.base.task
from scenario import core as scenario_core
class TaskRandomizer(abc.ABC):
    """Interface of objects that randomize a task before a rollout."""

    @abc.abstractmethod
    def randomize_task(self, task: gym_ignition.base.task.Task, **kwargs) -> None:
        """
        Randomize a :py:class:`~gym_ignition.base.task.Task` instance.

        Args:
            task: the task to randomize.

        Note:
            Note that each task has a :py:attr:`~gym_ignition.base.task.Task.world`
            property that provides access to the simulated
            :py:class:`scenario.bindings.core.World`.
        """
class PhysicsRandomizer(abc.ABC):
    """
    Abstract class that provides the machinery for randomizing the physics of a Task.

    Args:
        randomize_after_rollouts_num: defines after many rollouts physics should be
            randomized (i.e. the amount of times :py:meth:`gym.Env.reset` is called).
    """

    def __init__(self, randomize_after_rollouts_num: int = 0):

        # Countdown of the rollouts left before the physics expires
        self._rollout_counter = randomize_after_rollouts_num

        # Number of rollouts between two randomizations (0 disables expiration)
        self.randomize_after_rollouts_num = randomize_after_rollouts_num

    @abc.abstractmethod
    def randomize_physics(self, task: gym_ignition.base.task.Task, **kwargs) -> None:
        """
        Method that insert and configures the physics of a Task's world.

        By default this method loads a plugin that uses DART with no randomizations.
        Randomizing physics engine parameters or changing physics engine backend could be
        done by redefining this method and passing it to
        :py:class:`~gym_ignition.runtimes.gazebo_runtime.GazeboRuntime`.

        Args:
            task: A task containing a world object without physics.
        """

    @abc.abstractmethod
    def get_engine(self):
        """
        Return the physics engine to use for the rollout.

        Note:
            Supported physics engines:

            - :py:const:`scenario.bindings.gazebo.PhysicsEngine_dart`

        Return:
            The desired physics engine to set in the world.
        """

    def increase_rollout_counter(self) -> None:
        """
        Increase the rollouts counter.
        """

        # Nothing to count when expiration is disabled
        if self.randomize_after_rollouts_num == 0:
            return

        assert self._rollout_counter != 0
        self._rollout_counter -= 1

    def physics_expired(self) -> bool:
        """
        Checks if the physics needs to be randomized.

        Return:
            True if the physics has expired, false otherwise.
        """

        # Expiration disabled
        if self.randomize_after_rollouts_num == 0:
            return False

        # Still some rollouts to go
        if self._rollout_counter != 0:
            return False

        # Expired: reset the countdown and notify the caller
        self._rollout_counter = self.randomize_after_rollouts_num
        return True
class ModelRandomizer(abc.ABC):
    """Interface of objects that randomize a simulated model."""

    @abc.abstractmethod
    def randomize_model(
        self, task: gym_ignition.base.task.Task, **kwargs
    ) -> scenario_core.Model:
        """
        Randomize the model.

        Args:
            task: The task that operates on the model to randomize.

        Return:
            The randomized model.
        """
class ModelDescriptionRandomizer(abc.ABC):
    """Interface of objects that randomize a model description (URDF / SDF)."""

    @abc.abstractmethod
    def randomize_model_description(
        self, task: gym_ignition.base.task.Task, **kwargs
    ) -> str:
        """
        Randomize the model description.

        Args:
            task: The task that operates on the model description to randomize.

        Return:
            A string with the randomized model description.
        """
================================================
FILE: python/gym_ignition/randomizers/gazebo_env_randomizer.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import Callable, Dict, Optional, Union, cast
import gym
from gym_ignition.randomizers.abc import PhysicsRandomizer, TaskRandomizer
from gym_ignition.randomizers.physics import dart
from gym_ignition.runtimes import gazebo_runtime
from gym_ignition.utils import typing
MakeEnvCallable = Callable[[Optional[Dict]], gym.Env]
class GazeboEnvRandomizer(gym.Wrapper, TaskRandomizer, abc.ABC):
    """
    Base class to implement an environment randomizer for Ignition Gazebo.

    The randomizer is a :py:class:`gym.Wrapper` that extends the
    :py:meth:`gym.Env.reset` method. Objects that inherit from this class are used to
    setup the environment for the handled :py:class:`~gym_ignition.base.task.Task`.

    In its simplest form, a randomizer populates the world with all the models that need
    to be part of the simulation. The task could then operate on them from a
    :py:class:`~scenario.core.Model` object.

    More complex environments may require to randomize one or more simulated entities.
    Concrete classes that implement a randomizer could use
    :py:class:`~gym_ignition.randomizers.model.sdf.SDFRandomizer` to randomize the model
    and :py:class:`~gym_ignition.randomizers.abc.PhysicsRandomizer` to randomize
    the physics.

    Args:
        env: Defines the environment to handle. This argument could be either the string
            id if the environment does not need to be registered or a function that
            returns an environment object.
        physics_randomizer: Object that randomizes physics. If not specified, a new
            DART randomizer with no randomizations is created for this instance.

    Note:
        In order to randomize physics, the handled
        :py:class:`scenario.gazebo.GazeboSimulator` is destroyed and created again.
        This operation is demanding, consider randomizing physics at a low rate.
    """

    def __init__(
        self,
        env: Union[str, MakeEnvCallable],
        physics_randomizer: Optional[PhysicsRandomizer] = None,
        **kwargs,
    ):
        # Bug fix: the previous default argument `physics_randomizer=dart.DART()` was
        # evaluated once at function-definition time, therefore every randomizer
        # created without an explicit argument shared the same stateful object
        # (including its rollout counter). Creating the default lazily gives each
        # instance its own randomizer while remaining backward compatible.
        if physics_randomizer is None:
            physics_randomizer = dart.DART()

        # Print the extra kwargs
        gym.logger.debug(f"GazeboEnvRandomizer: {dict(kwargs=kwargs)}")

        # Store the options
        self._env_option = env
        self._kwargs = dict(**kwargs, physics_engine=physics_randomizer.get_engine())

        # Create the environment
        env_to_wrap = self._create_environment(env=self._env_option, **self._kwargs)

        # Initialize the wrapper
        gym.Wrapper.__init__(self, env=env_to_wrap)

        # Store the physics randomizer
        self._physics_randomizer = physics_randomizer

    # ===============
    # gym.Env methods
    # ===============

    def reset(self, **kwargs) -> typing.Observation:
        """
        Reset the environment, re-creating the Gazebo simulator first when the
        physics randomizer reports that the physics has expired.
        """
        # Reset the physics
        if self._physics_randomizer.physics_expired():

            # Save the random components of the task so they survive the re-creation
            seed = self.env.task.seed
            np_random = self.env.task.np_random

            # Reset the runtime + task, creating a new Gazebo instance
            self.env.close()
            del self.env
            self.env = self._create_environment(self._env_option, **self._kwargs)

            # Restore the random components
            self.env.seed(seed=seed)
            assert self.env.task.seed == seed
            self.env.task.np_random = np_random

        # Mark the beginning of a new rollout
        self._physics_randomizer.increase_rollout_counter()

        # Reset the task through the TaskRandomizer
        self.randomize_task(task=self.env.task, gazebo=self.env.gazebo, **kwargs)

        # Execute a paused step so that the randomized state is propagated
        ok_paused_run = self.env.gazebo.run(paused=True)

        if not ok_paused_run:
            raise RuntimeError("Failed to execute a paused Gazebo run")

        # Reset the Task
        return self.env.reset()

    # ===============
    # Private methods
    # ===============

    def _create_environment(
        self, env: Union[str, MakeEnvCallable], **kwargs
    ) -> gazebo_runtime.GazeboRuntime:
        """
        Create the environment either from a registered gym id or a factory callable.

        Raises:
            ValueError: If the env argument type is not supported, or if the created
                environment does not wrap a GazeboRuntime.
        """
        if isinstance(env, str):
            env_to_wrap = self._create_from_id(env_id=env, **kwargs)
        elif callable(env):
            env_to_wrap = self._create_from_callable(make_env=env, **kwargs)
        else:
            raise ValueError("The type of env object was not recognized")

        if not isinstance(env_to_wrap.unwrapped, gazebo_runtime.GazeboRuntime):
            raise ValueError("The environment to wrap is not a GazeboRuntime")

        return cast(gazebo_runtime.GazeboRuntime, env_to_wrap)

    @staticmethod
    def _create_from_callable(make_env: MakeEnvCallable, **kwargs) -> gym.Env:
        # Delegate the construction to the user-provided factory
        env = make_env(**kwargs)
        return env

    @staticmethod
    def _create_from_id(env_id: str, **kwargs) -> gym.Env:
        # Build a registered environment from its id
        env = gym.make(env_id, **kwargs)
        return env
================================================
FILE: python/gym_ignition/randomizers/model/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import sdf
================================================
FILE: python/gym_ignition/randomizers/model/sdf.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from enum import Enum, auto
from pathlib import Path
from typing import Dict, List, NamedTuple, Union
import numpy as np
from lxml import etree
# Supported sampling distributions (functional Enum API; member values start at 1,
# matching the previous auto() declaration).
Distribution = Enum("Distribution", ["Uniform", "Gaussian"])
# Supported randomization methods (functional Enum API; member values start at 1,
# matching the previous auto() declaration).
Method = Enum("Method", ["Absolute", "Additive", "Coefficient"])
class RandomizationData(NamedTuple):
    """
    Immutable description of a single randomization applied to an SDF element.
    """

    # XPath pattern of the element(s) to randomize; after processing it is replaced
    # by the exact path of each matched element
    xpath: str
    # Distribution used to sample the new value (the previous `str` annotation
    # was inaccurate: a Distribution member is stored here)
    distribution: Distribution
    # Parameters of the selected distribution (GaussianParams or UniformParams)
    parameters: "DistributionParameters"
    # How the sampled value is applied (absolute, additive, or coefficient)
    method: Method
    # If True, elements whose current value is 0 are skipped
    ignore_zeros: bool = False
    # If True, the randomized value is clamped to be >= 0
    force_positive: bool = False
    # The matched lxml element; populated during SDFRandomizer.process_data()
    element: etree.Element = None
class GaussianParams(NamedTuple):
    """
    Parameters of a Gaussian distribution.
    """

    # NOTE(review): this value is passed to numpy's `scale` argument when sampling,
    # which expects a standard deviation — confirm the intended semantics.
    variance: float
    # Mean of the distribution; may be None — presumably the caller is expected to
    # provide it for Method.Absolute usage. TODO confirm against SDFRandomizer.sample().
    mean: Union[float, None] = None
class UniformParams(NamedTuple):
    """
    Bounds of a uniform distribution.
    """

    # Lower bound of the sampling interval
    low: float
    # Upper bound of the sampling interval
    high: float


# Union of the supported distribution parameter tuples
DistributionParameters = Union[UniformParams, GaussianParams]
class RandomizationDataBuilder:
    """
    Builder of :py:class:`~gym_ignition.randomizers.model.sdf.RandomizationData`
    objects.

    The builder exposes a fluent interface: every setter returns the builder itself
    so that calls can be chained, and :py:meth:`add` finalizes the configuration and
    registers it into the associated randomizer.

    Args:
        randomizer: The :py:class:`~gym_ignition.randomizers.model.sdf.SDFRandomizer`
            object to which the created randomization will be inserted.
    """

    def __init__(self, randomizer: "SDFRandomizer"):
        # Accumulates the keyword arguments used to build the RandomizationData
        self.storage: Dict = {}
        self.randomizer = randomizer

    def at_xpath(self, xpath: str) -> "RandomizationDataBuilder":
        """
        Set the XPath pattern associated to the randomization.

        Args:
            xpath: The XPath pattern.

        Returns:
            The randomization builder to allow chaining methods.
        """
        self.storage["xpath"] = xpath
        return self

    def sampled_from(
        self, distribution: Distribution, parameters: DistributionParameters
    ) -> "RandomizationDataBuilder":
        """
        Set the distribution associated to the randomization.

        Args:
            distribution: The desired distribution.
            parameters: The namedtuple with the parameters of the distribution.

        Returns:
            The randomization builder to allow chaining methods.
        """
        self.storage["distribution"] = distribution
        self.storage["parameters"] = parameters

        # Each distribution accepts only its own parameters namedtuple
        expected_parameters = {
            Distribution.Gaussian: GaussianParams,
            Distribution.Uniform: UniformParams,
        }

        required_type = expected_parameters.get(distribution)

        if required_type is not None and not isinstance(parameters, required_type):
            raise ValueError("Wrong parameters type")

        return self

    def method(self, method: Method) -> "RandomizationDataBuilder":
        """
        Set the randomization method.

        Args:
            method: The desired randomization method.

        Returns:
            The randomization builder to allow chaining methods.
        """
        self.storage["method"] = method
        return self

    def ignore_zeros(self, ignore_zeros: bool) -> "RandomizationDataBuilder":
        """
        Ignore the randomization of values that are zero.

        If the value to randomize has a default value of 0 in the SDF, when this
        method is chained the randomization is skipped. In the case of a multi-match
        XPath pattern, the values that are not zero are not skipped.

        Args:
            ignore_zeros: True if zeros should be ignored, false otherwise.

        Returns:
            The randomization builder to allow chaining methods.
        """
        self.storage["ignore_zeros"] = ignore_zeros
        return self

    def force_positive(self, force_positive: bool = True) -> "RandomizationDataBuilder":
        """
        Force the randomized value to be greater than zero.

        This option is helpful to enforce that values e.g. the mass will stay
        positive regardless of the applied distribution parameters.

        Args:
            force_positive: True to force positive parameters, false otherwise.

        Returns:
            The randomization builder to allow chaining methods.
        """
        self.storage["force_positive"] = force_positive
        return self

    def add(self) -> None:
        """
        Close the chaining of methods and return to the SDF randomizer the
        configuration.

        Raises:
            RuntimeError: If the XPath pattern does not find any match in the SDF.
        """
        data = RandomizationData(**self.storage)

        matches = self.randomizer.find_xpath(data.xpath)

        if len(matches) == 0:
            raise RuntimeError(f"Failed to find element matching XPath '{data.xpath}'")

        self.randomizer.insert(randomization_data=data)
class SDFRandomizer:
    """
    Randomized SDF files generator.

    Args:
        sdf_model: The absolute path to the SDF file.

    Raises:
        ValueError: If the SDF file does not exist.
    """

    def __init__(self, sdf_model: str):
        self._sdf_file = sdf_model

        if not Path(self._sdf_file).is_file():
            raise ValueError(f"File '{sdf_model}' does not exist")

        # Initialize the root
        tree = self._get_tree_from_file(self._sdf_file)
        self._root: etree.Element = tree.getroot()

        # List of randomizations
        self._randomizations: List[RandomizationData] = []

        # Default element values, keyed by element identity; collected in
        # process_data() and used by Method.Additive and Method.Coefficient
        self._default_values: Dict[etree.Element, float] = {}

        # Store an independent RNG
        self.rng = np.random.default_rng()

    def seed(self, seed: int) -> None:
        """
        Seed the SDF randomizer.

        Args:
            seed: The seed number.
        """
        # Re-create the generator so the sequence is fully reproducible
        self.rng = np.random.default_rng(seed)

    def find_xpath(self, xpath: str) -> List[etree.Element]:
        """
        Find the elements that match an XPath pattern.

        This method could be helpful to test the matches of a XPath pattern before
        using it in
        :py:meth:`~gym_ignition.randomizers.model.sdf.RandomizationDataBuilder.at_xpath`.

        Args:
            xpath: The XPath pattern.

        Return:
            A list of elements matching the XPath pattern.
        """
        return self._root.findall(xpath)

    def process_data(self) -> None:
        """
        Process all the inserted randomizations, expanding multi-match XPath
        patterns into one randomization per matched element.

        Raises:
            RuntimeError: If the XPath of a randomization has no matches.
        """
        # Since we support multi-match XPaths, we expand all the individual matches
        expanded_randomizations = []

        for data in self._randomizations:

            # Find all the matches
            elements: List[etree.Element] = self._root.findall(path=data.xpath)

            if len(elements) == 0:
                raise RuntimeError(f"Failed to find elements from XPath '{data.xpath}'")

            for element in elements:

                # Skip zero-valued elements when requested
                if data.ignore_zeros and float(self._get_element_text(element)) == 0:
                    continue

                # Get the precise XPath to the element
                element_xpath = element.getroottree().getpath(element)

                # Get the parameters
                params = data.parameters

                # Cache the current value: Additive and Coefficient methods apply
                # the sample relative to it
                if data.method in {Method.Additive, Method.Coefficient}:
                    element_text = float(self._get_element_text(element))
                    self._default_values[element] = element_text

                # Update the data
                complete_data = data._replace(
                    xpath=element_xpath, element=element, parameters=params
                )

                expanded_randomizations.append(complete_data)

        # Store the updated data
        self._randomizations = expanded_randomizations

    def sample(self, pretty_print=False) -> str:
        """
        Sample a randomized SDF string.

        Args:
            pretty_print: True to pretty print the output.

        Raises:
            ValueError: If the distribution of a randomization is not recognized.
            ValueError: If the method of a randomization is not recognized.

        Returns:
            The randomized model as SDF string.
        """
        for data in self._randomizations:

            # Draw the sample from the configured distribution.
            # NOTE(review): `variance` is passed as numpy's `scale`, which is the
            # standard deviation — confirm the intended semantics of GaussianParams.
            if data.distribution is Distribution.Gaussian:
                sample = self.rng.normal(
                    loc=data.parameters.mean, scale=data.parameters.variance
                )
            elif data.distribution is Distribution.Uniform:
                sample = self.rng.uniform(
                    low=data.parameters.low, high=data.parameters.high
                )
            else:
                raise ValueError("Distribution not recognized")

            # Update the value
            if data.method is Method.Absolute:
                data.element.text = str(sample)
            elif data.method is Method.Additive:
                default_value = self._default_values[data.element]
                data.element.text = str(sample + default_value)
            elif data.method is Method.Coefficient:
                default_value = self._default_values[data.element]
                data.element.text = str(sample * default_value)
            else:
                raise ValueError("Method not recognized")

            # Clamp negative results to 0 when requested
            if data.force_positive:
                data.element.text = str(max(float(data.element.text), 0.0))

        return etree.tostring(self._root, pretty_print=pretty_print).decode()

    def new_randomization(self) -> RandomizationDataBuilder:
        """
        Start the chaining to build a new randomization.

        Return:
            A randomization builder.
        """
        return RandomizationDataBuilder(randomizer=self)

    def insert(self, randomization_data: RandomizationData) -> None:
        """
        Insert a randomization.

        Args:
            randomization_data: A new randomization.
        """
        self._randomizations.append(randomization_data)

    def get_active_randomizations(self) -> List[RandomizationData]:
        """
        Return the active randomizations.

        This method could be helpful also in the case of multi-match XPath patterns
        to validate that the inserted randomizations have been processed correctly.

        Returns:
            The list of the active randomizations.
        """
        return self._randomizations

    def clean(self) -> None:
        """
        Clean the SDF randomizer, removing all randomizations and re-reading the
        SDF tree from the original file.
        """
        self._randomizations = []
        self._default_values = {}

        # Re-parse the original file to discard any sampled values
        tree = self._get_tree_from_file(self._sdf_file)
        self._root = tree.getroot()

    @staticmethod
    def _get_tree_from_file(xml_file) -> etree.ElementTree:
        # remove_blank_text allows producing a clean pretty-printed output later
        parser = etree.XMLParser(remove_blank_text=True)
        tree = etree.parse(source=xml_file, parser=parser)
        return tree

    @staticmethod
    def _get_element_text(element: etree.Element) -> str:
        # Raise instead of returning None so callers can safely float() the result
        text = element.text

        if text is None:
            raise RuntimeError(f"The element {element.tag} does not have any content")

        return text
================================================
FILE: python/gym_ignition/randomizers/physics/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import dart
================================================
FILE: python/gym_ignition/randomizers/physics/dart.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import gym_ignition.base.task
from gym_ignition import randomizers
from scenario import gazebo as scenario
class DART(randomizers.abc.PhysicsRandomizer):
    """
    Physics randomizer that configures the Gazebo World of a Task to use the DART
    physics engine.

    Note:
        This class does not apply any physics randomization.
    """

    def get_engine(self):
        # Always select the DART engine
        return scenario.PhysicsEngine_dart

    def randomize_physics(self, task: gym_ignition.base.task.Task, **kwargs) -> None:
        # Nothing to randomize
        pass
================================================
FILE: python/gym_ignition/rbd/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import conversions, idyntree, utils
================================================
FILE: python/gym_ignition/rbd/conversions.py
================================================
import abc
from typing import Tuple
import numpy as np
from scipy.spatial.transform import Rotation
class Transform(abc.ABC):
    """
    Static helpers to build and decompose 4x4 homogeneous transforms.
    """

    @staticmethod
    def from_position_and_quaternion(
        position: np.ndarray, quaternion: np.ndarray
    ) -> np.ndarray:
        """Build a homogeneous transform from a position and a wxyz quaternion."""
        if quaternion.size != 4:
            raise ValueError("Quaternion array must have 4 elements")

        return Transform.from_position_and_rotation(
            position=position, rotation=Quaternion.to_rotation(quaternion)
        )

    @staticmethod
    def from_position_and_rotation(
        position: np.ndarray, rotation: np.ndarray
    ) -> np.ndarray:
        """Build a homogeneous transform from a position and a rotation matrix."""
        if position.size != 3:
            raise ValueError("Position array must have 3 elements")

        if rotation.shape != (3, 3):
            raise ValueError("Rotation must be a square 3x3 matrix")

        # Compose the block matrix [R | p; 0 0 0 1]
        transform = np.eye(4)
        transform[:3, :3] = rotation
        transform[:3, 3] = position
        return transform

    @staticmethod
    def to_position_and_rotation(
        transform: np.ndarray,
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Split a homogeneous transform into position and rotation matrix."""
        if transform.shape != (4, 4):
            raise ValueError("Transform must be a 4x4 matrix")

        return transform[:3, 3], transform[:3, :3]

    @staticmethod
    def to_position_and_quaternion(
        transform: np.ndarray,
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Split a homogeneous transform into position and wxyz quaternion."""
        position, rotation = Transform.to_position_and_rotation(transform=transform)
        return position, Quaternion.from_matrix(matrix=rotation)
class Quaternion(abc.ABC):
    """
    Static helpers to convert between quaternion serializations and rotations.
    """

    @staticmethod
    def to_wxyz(xyzw: np.ndarray) -> np.ndarray:
        """Convert a quaternion from xyzw to wxyz serialization."""
        if xyzw.shape != (4,):
            raise ValueError(xyzw)

        # Move the scalar component from the back to the front
        return np.roll(xyzw, shift=1)

    @staticmethod
    def to_xyzw(wxyz: np.ndarray) -> np.ndarray:
        """Convert a quaternion from wxyz to xyzw serialization."""
        if wxyz.shape != (4,):
            raise ValueError(wxyz)

        # Move the scalar component from the front to the back
        return np.roll(wxyz, shift=-1)

    @staticmethod
    def to_rotation(quaternion: np.ndarray) -> np.ndarray:
        """Convert a wxyz quaternion to a 3x3 rotation matrix."""
        if quaternion.shape != (4,):
            raise ValueError(quaternion)

        # scipy expects the xyzw serialization
        return Rotation.from_quat(Quaternion.to_xyzw(quaternion)).as_matrix()

    @staticmethod
    def from_matrix(matrix: np.ndarray) -> np.ndarray:
        """Convert a 3x3 rotation matrix to a wxyz quaternion."""
        if matrix.shape != (3, 3):
            raise ValueError(matrix)

        # scipy returns the xyzw serialization
        return Quaternion.to_wxyz(Rotation.from_matrix(matrix).as_quat())
================================================
FILE: python/gym_ignition/rbd/idyntree/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import helpers, inverse_kinematics_nlp, kindyncomputations, numpy
================================================
FILE: python/gym_ignition/rbd/idyntree/helpers.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
import os
from enum import Enum, auto
from typing import List
import idyntree.bindings as idt
from gym_ignition.utils import resource_finder
class FrameVelocityRepresentation(Enum):
    """
    Frame velocity representations supported by iDynTree.
    """

    MIXED_REPRESENTATION = auto()
    BODY_FIXED_REPRESENTATION = auto()
    INERTIAL_FIXED_REPRESENTATION = auto()

    def to_idyntree(self):
        """
        Return the corresponding idyntree constant.

        Raises:
            ValueError: If the member is not recognized.
        """
        # Enum members are singletons, so identity checks replace value comparisons
        if self is FrameVelocityRepresentation.MIXED_REPRESENTATION:
            return idt.MIXED_REPRESENTATION

        if self is FrameVelocityRepresentation.BODY_FIXED_REPRESENTATION:
            return idt.BODY_FIXED_REPRESENTATION

        if self is FrameVelocityRepresentation.INERTIAL_FIXED_REPRESENTATION:
            return idt.INERTIAL_FIXED_REPRESENTATION

        raise ValueError(self.value)
class iDynTreeHelpers(abc.ABC):
    """
    Collection of static helpers to build iDynTree objects from model files.
    """

    @staticmethod
    def get_model_loader(model_file: str, considered_joints: List[str] = None):
        """
        Create an iDynTree ModelLoader with the model read from file.

        Raises:
            RuntimeError: If the file is an SDF model or loading fails.
        """
        # Resolve the model path through the resource finder
        urdf_file = resource_finder.find_resource(file_name=model_file)

        # Extract the extension to reject unsupported formats
        _, file_with_ext = os.path.split(urdf_file)
        _, extension = os.path.splitext(file_with_ext)

        if extension == ".sdf":
            raise RuntimeError("SDF models are not currently supported by iDynTree")

        # Create the model loader
        loader = idt.ModelLoader()

        # Load either the full or the reduced model
        success = (
            loader.loadModelFromFile(urdf_file)
            if considered_joints is None
            else loader.loadReducedModelFromFile(urdf_file, considered_joints)
        )

        if not success:
            raise RuntimeError(f"Failed to load model from file '{urdf_file}'")

        return loader

    @staticmethod
    def get_kindyncomputations(
        model_file: str,
        considered_joints: List[str] = None,
        velocity_representation: FrameVelocityRepresentation = FrameVelocityRepresentation.MIXED_REPRESENTATION,
    ):
        """
        Create a KinDynComputations object initialized with the model read from file.

        Raises:
            RuntimeError: If loading the model or configuring the velocity
                representation fails.
        """
        # Get the model loader
        loader = iDynTreeHelpers.get_model_loader(model_file, considered_joints)

        # Create KinDynComputations and insert the model
        kindyn = idt.KinDynComputations()

        if not kindyn.loadRobotModel(loader.model()):
            raise RuntimeError("Failed to load model")

        # Configure the velocity representation
        if not kindyn.setFrameVelocityRepresentation(
            velocity_representation.to_idyntree()
        ):
            raise RuntimeError("Failed to set the velocity representation")

        return kindyn
================================================
FILE: python/gym_ignition/rbd/idyntree/inverse_kinematics_nlp.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import os
from dataclasses import dataclass, field
from enum import Enum, auto
from typing import Dict, List, Optional, Tuple, Union

import idyntree.bindings as idt
import numpy as np

from gym_ignition import rbd
# Kinds of IK targets that can be attached to a frame (functional Enum API;
# member values start at 1, matching the previous auto() declaration).
TargetType = Enum("TargetType", ["POSITION", "ROTATION", "POSE"])
@dataclass
class TransformTargetData:
    """
    Buffer storing the data of a full pose (transform) target.
    """

    # Target position, 3 elements
    position: np.ndarray
    # Target orientation, 4 elements — presumably wxyz serialization (the identity
    # default used elsewhere is [1, 0, 0, 0]); TODO confirm
    quaternion: np.ndarray
@dataclass
class TargetData:
    """
    Buffer storing type, weight, and value of an IK target.
    """

    # Kind of the target (position, rotation, or full pose)
    type: TargetType
    # Scalar weight for position/rotation targets, a (position, rotation) pair
    # of weights for pose targets
    weight: Union[float, Tuple[float, float]]
    # Raw target value: ndarray for position/rotation, TransformTargetData for pose
    data: Union[np.ndarray, TransformTargetData]
@dataclass
class IKSolution:
    """
    Structure storing a complete IK solution.

    Bug fix: the previous class-level ``np.array`` defaults were created once and
    shared by every instance, so mutating the default of one solution silently
    mutated all the others. ``field(default_factory=...)`` gives each instance
    its own fresh array while keeping the same default values.
    """

    # Configuration of the optimized joints
    joint_configuration: np.ndarray
    # Position of the base (defaults to the origin)
    base_position: np.ndarray = field(default_factory=lambda: np.array([0.0, 0.0, 0.0]))
    # Orientation of the base as wxyz quaternion (defaults to the identity)
    base_quaternion: np.ndarray = field(
        default_factory=lambda: np.array([1.0, 0.0, 0.0, 0.0])
    )
class RotationParametrization(Enum):
    """
    Rotation parametrizations supported by the iDynTree IK solver.
    """

    QUATERNION = auto()
    ROLL_PITCH_YAW = auto()

    def to_idyntree(self):
        """
        Return the corresponding idyntree constant.

        Raises:
            ValueError: If the member is not recognized.
        """
        # Enum members are singletons, so identity checks replace value comparisons
        if self is RotationParametrization.QUATERNION:
            return idt.InverseKinematicsRotationParametrizationQuaternion

        if self is RotationParametrization.ROLL_PITCH_YAW:
            return idt.InverseKinematicsRotationParametrizationRollPitchYaw

        raise ValueError(self.value)
class TargetResolutionMode(Enum):
    """
    Policies defining which components of a target are treated as constraints.
    """

    TARGET_AS_CONSTRAINT_FULL = auto()
    TARGET_AS_CONSTRAINT_NONE = auto()
    TARGET_AS_CONSTRAINT_POSITION = auto()
    TARGET_AS_CONSTRAINT_ROTATION = auto()

    def to_idyntree(self):
        """
        Return the corresponding idyntree constant.

        Raises:
            ValueError: If the member is not recognized.
        """
        # Enum members are singletons, so identity checks replace value comparisons
        if self is TargetResolutionMode.TARGET_AS_CONSTRAINT_FULL:
            return idt.InverseKinematicsTreatTargetAsConstraintFull

        if self is TargetResolutionMode.TARGET_AS_CONSTRAINT_NONE:
            return idt.InverseKinematicsTreatTargetAsConstraintNone

        if self is TargetResolutionMode.TARGET_AS_CONSTRAINT_POSITION:
            return idt.InverseKinematicsTreatTargetAsConstraintPositionOnly

        if self is TargetResolutionMode.TARGET_AS_CONSTRAINT_ROTATION:
            return idt.InverseKinematicsTreatTargetAsConstraintRotationOnly

        raise ValueError(self.value)
class InverseKinematicsNLP:
    def __init__(
        self,
        urdf_filename: str,
        considered_joints: List[str] = None,
        joint_serialization: List[str] = None,
    ) -> None:
        """
        Store the IK configuration; the solver itself is created in initialize().

        Args:
            urdf_filename: Path to the URDF model file.
            considered_joints: Optional subset of joints used by the optimization.
            joint_serialization: Optional joint ordering of the full IK solution.
        """
        # Whether the base is free-floating; configured in initialize()
        self._floating_base: bool = False
        # Name of the base frame; resolved in initialize()
        self._base_frame: Optional[str] = None
        self._urdf_filename: str = urdf_filename
        # Buffers of the active targets, keyed by frame name (plus the "com" key)
        self._targets_data: Dict[str, TargetData] = dict()
        # The underlying iDynTree InverseKinematics object, created in initialize()
        self._ik: Optional[idt.InverseKinematics] = None
        self._considered_joints: List[str] = considered_joints
        self._joint_serialization: List[str] = joint_serialization
# ======================
# INITIALIZATION METHODS
# ======================
    def initialize(
        self,
        rotation_parametrization: RotationParametrization = RotationParametrization.ROLL_PITCH_YAW,
        target_mode: TargetResolutionMode = TargetResolutionMode.TARGET_AS_CONSTRAINT_NONE,
        cost_tolerance: float = 1e-08,
        constraints_tolerance: float = 1e-4,
        max_iterations: int = 1000,
        base_frame: str = None,
        floating_base: bool = False,
        verbosity: int = 1,
    ) -> None:
        """
        Create and configure the underlying iDynTree InverseKinematics object.

        Args:
            rotation_parametrization: Parametrization of rotations in the NLP.
            target_mode: Default policy treating targets as constraints.
            cost_tolerance: Tolerance on the NLP cost.
            constraints_tolerance: Tolerance on the NLP constraints.
            max_iterations: Maximum number of solver iterations.
            base_frame: Optional name of the frame used as floating base.
            floating_base: True to optimize a floating-base robot; if False the
                base is pinned with a frame transform constraint.
            verbosity: Verbosity level forwarded to the solver.

        Raises:
            RuntimeError: If setting the model or the base frame fails.
        """
        # Create the IK object
        self._ik = idt.InverseKinematics()

        # Load the URDF model and get the model loader object.
        # We create the full model with all the joints specified in joint_serialization.
        model_loader: idt.ModelLoader = self._get_model_loader(
            urdf=self._urdf_filename, joint_serialization=self._joint_serialization
        )

        # Get the model
        model: idt.Model = model_loader.model()

        # If all joints are enabled, get the list of joint names in order to know the
        # serialization of the full IK solution
        if self._joint_serialization is None:
            self._joint_serialization = []

            for joint_idx in range(model.getNrOfJoints()):
                self._joint_serialization.append(model.getJointName(joint_idx))

        # Configure the considered joints for the optimization.
        # If not specified, use the serialization of the full solution.
        if self._considered_joints is None:
            self._considered_joints = self._joint_serialization

        # Set the model in the IK specifying the considered joints
        if not self._ik.setModel(model, self._considered_joints):
            raise RuntimeError("Failed to set the model in the IK object")

        # Configure IK
        self._ik.setVerbosity(verbosity)
        self._ik.setMaxIterations(max_iterations)
        self._ik.setCostTolerance(cost_tolerance)
        self._ik.setConstraintsTolerance(constraints_tolerance)
        self._ik.setDefaultTargetResolutionMode(target_mode.to_idyntree())
        self._ik.setRotationParametrization(rotation_parametrization.to_idyntree())

        # Optionally change the base frame
        if base_frame is not None:
            # Store the frame of the base link
            self._base_frame = base_frame

            # Change the base frame
            if not self._ik.setFloatingBaseOnFrameNamed(base_frame):
                raise RuntimeError("Failed to change floating base frame")
        else:
            self._base_frame = model.getLinkName(model.getDefaultBaseLink())

        # Store whether the IK optimize a fixed or floating base robot
        self._floating_base = floating_base

        if not self._floating_base:
            # Add a frame constraint for the base to keep it fixed at the identity pose
            self.add_frame_transform_constraint(
                frame_name=self._base_frame,
                position=np.array([0.0, 0, 0.0]),
                quaternion=np.array([1.0, 0, 0, 0]),
            )
    def set_current_robot_configuration(
        self,
        base_position: np.ndarray,
        base_quaternion: np.ndarray,
        joint_configuration: np.ndarray,
    ) -> None:
        """
        Set the current configuration of the robot in the IK object.

        Args:
            base_position: Position of the base, 3 elements.
            base_quaternion: Orientation of the base, 4 elements.
            joint_configuration: Joint positions, following the joint serialization.

        Raises:
            ValueError: If the joint configuration size does not match the
                joint serialization.
            RuntimeError: If updating the IK object fails.
        """
        if joint_configuration.size != len(self._joint_serialization):
            raise ValueError(joint_configuration)

        # Convert the base pose and joint vector to iDynTree objects
        H = rbd.idyntree.numpy.FromNumPy.to_idyntree_transform(
            position=base_position, quaternion=base_quaternion
        )

        q = rbd.idyntree.numpy.FromNumPy.to_idyntree_dyn_vector(
            array=joint_configuration
        )

        if not self._ik.setCurrentRobotConfiguration(H, q):
            raise RuntimeError("Failed to set the current robot configuration")

        # A fixed-base robot keeps its base pinned: move the constraint together
        # with the new base pose
        if not self._floating_base:
            self.update_frame_transform_constraint(
                frame_name=self._base_frame,
                position=base_position,
                quaternion=base_quaternion,
            )
def set_current_joint_configuration(
self, joint_name: str, configuration: float
) -> None:
if joint_name not in self._joint_serialization:
raise ValueError(joint_name)
if not self._ik.setJointConfiguration(
jointName=joint_name, jointConfiguration=configuration
):
raise RuntimeError(
f"Failed to set the configuration of joint '{joint_name}'"
)
# ==============
# TARGET METHODS
# ==============
    def add_target(
        self,
        frame_name: str,
        target_type: TargetType,
        weight: Union[float, Tuple[float, float]] = None,
        as_constraint: bool = False,
    ) -> None:
        """
        Add a new IK target on a frame.

        Args:
            frame_name: The name of the frame the target refers to.
            target_type: The kind of target (position, rotation, or full pose).
            weight: Scalar weight for position/rotation targets, a pair of weights
                (position, rotation) for pose targets. Defaults to 1.0 (or (1.0, 1.0)).
            as_constraint: True to treat the target as a hard constraint.

        Raises:
            ValueError: If the weight type does not match the target type, or if the
                target type is not recognized.
            RuntimeError: If adding or configuring the target fails.
        """
        # Check the type of the 'weight' argument
        float_target_types = {TargetType.ROTATION, TargetType.POSITION}
        weight_type = float if target_type in float_target_types else tuple

        # Backward compatibility: if the target type is POSE and the weight is a float,
        # we apply the same weight to both target components
        if target_type is TargetType.POSE and isinstance(weight, float):
            weight = (weight, weight)

        # Set the default weight if not specified
        default_weight = 1.0 if target_type in float_target_types else (1.0, 1.0)
        weight = weight if weight is not None else default_weight

        if not isinstance(weight, weight_type):
            raise ValueError(f"The weight must be {weight_type} for this target")

        if target_type == TargetType.ROTATION:
            # Add the target
            ok_target = self._ik.addRotationTarget(
                frame_name, idt.Rotation.Identity(), weight
            )

            # Initialize the target data buffers
            self._targets_data[frame_name] = TargetData(
                type=TargetType.ROTATION, weight=weight, data=np.array([1.0, 0, 0, 0])
            )
        elif target_type == TargetType.POSITION:
            # Add the target
            ok_target = self._ik.addPositionTarget(
                frame_name, idt.Position_Zero(), weight
            )

            # Initialize the target data buffers
            self._targets_data[frame_name] = TargetData(
                type=TargetType.POSITION, weight=weight, data=np.array([0.0, 0, 0])
            )
        elif target_type == TargetType.POSE:
            # Add the target (one weight per pose component)
            ok_target = self._ik.addTarget(
                frame_name, idt.Transform.Identity(), weight[0], weight[1]
            )

            # Create the transform target data
            target_data = TransformTargetData(
                position=np.array([0.0, 0, 0]), quaternion=np.array([1.0, 0, 0, 0])
            )

            # Initialize the target data buffers
            self._targets_data[frame_name] = TargetData(
                type=TargetType.POSE, weight=weight, data=target_data
            )
        else:
            raise ValueError(target_type)

        if not ok_target:
            raise RuntimeError(f"Failed to add target for frame '{frame_name}'")

        # Optionally promote the target to a constraint, matching its component(s)
        if as_constraint:
            if target_type == TargetType.ROTATION:
                constraint = idt.InverseKinematicsTreatTargetAsConstraintRotationOnly
            elif target_type == TargetType.POSITION:
                constraint = idt.InverseKinematicsTreatTargetAsConstraintPositionOnly
            else:
                assert target_type == TargetType.POSE
                constraint = idt.InverseKinematicsTreatTargetAsConstraintFull

            if not self._ik.setTargetResolutionMode(frame_name, constraint):
                raise RuntimeError(f"Failed to set target '{frame_name}' as constraint")
    def add_com_target(
        self,
        weight: float = 1.0,
        as_constraint: bool = False,
        constraint_tolerance: float = 1e-8,
    ) -> None:
        """
        Add a target on the center of mass of the robot.

        Args:
            weight: The weight of the CoM target.
            as_constraint: True to treat the target as a hard constraint.
            constraint_tolerance: Tolerance used when the target is a constraint.
        """
        # Add the target
        self._ik.setCOMTarget(idt.Position_Zero(), weight)

        # Configure it either as target or constraint
        self._ik.setCOMAsConstraint(asConstraint=as_constraint)
        self._ik.setCOMAsConstraintTolerance(tolerance=constraint_tolerance)

        # Initialize the target data buffers; the CoM uses the reserved "com" key
        assert "com" not in self._targets_data.keys()
        self._targets_data["com"] = TargetData(
            type=TargetType.POSITION, weight=weight, data=np.array([0.0, 0, 0])
        )
def update_position_target(self, target_name: str, position: np.ndarray) -> None:
if target_name not in self.get_active_target_names(
target_type=TargetType.POSITION
):
raise ValueError(f"Failed to find a position target '{target_name}'")
# Create the iDynTree position
p = rbd.idyntree.numpy.FromNumPy.to_idyntree_position(position=position)
# Update the target inside IK
if not self._ik.updateTarget(target_name, p):
raise RuntimeError(f"Failed to update position of target '{target_name}'")
# Get the configured weight
weight = self._targets_data[target_name].weight
# Update the target data
self._targets_data[target_name] = TargetData(
type=TargetType.POSITION, weight=weight, data=position
)
def update_rotation_target(self, target_name: str, quaternion: np.ndarray) -> None:
if target_name not in self.get_active_target_names(
target_type=TargetType.ROTATION
):
raise ValueError(f"Failed to find a rotation target '{target_name}'")
# Create the iDynTree rotation matrix
R = rbd.idyntree.numpy.FromNumPy.to_idyntree_rotation(quaternion=quaternion)
# Update the target inside IK
if not self._ik.updateRotationTarget(target_name, R):
raise RuntimeError(f"Failed to update rotation of target '{target_name}'")
# Get the configured weight
weight = self._targets_data[target_name].weight
# Update the target data
self._targets_data[target_name] = TargetData(
type=TargetType.ROTATION, weight=weight, data=quaternion
)
def update_transform_target(
self, target_name: str, position: np.ndarray, quaternion: np.ndarray
) -> None:
if target_name not in self.get_active_target_names(target_type=TargetType.POSE):
raise ValueError(f"Failed to find a transform target '{target_name}'")
# Create the iDynTree transform
H = rbd.idyntree.numpy.FromNumPy.to_idyntree_transform(
position=position, quaternion=quaternion
)
# Update the target inside IK
if not self._ik.updateTarget(target_name, H):
raise RuntimeError(f"Failed to update transform of target '{target_name}'")
# Get the configured weight
weight = self._targets_data[target_name].weight
# Create the transform target data
transform_data = TransformTargetData(position=position, quaternion=quaternion)
# Update the target data
self._targets_data[target_name] = TargetData(
type=TargetType.POSE, weight=weight, data=transform_data
)
def update_com_target(self, position: np.ndarray) -> None:
if not self._ik.isCOMTargetActive():
raise RuntimeError("Constraint on CoM not active")
# Create the iDynTree position
p = rbd.idyntree.numpy.FromNumPy.to_idyntree_position(position=position)
# Update the target inside IK
self._ik.setCOMTarget(p, self._targets_data["com"].weight)
# Update the target data
self._targets_data["com"] = TargetData(
type=TargetType.POSITION,
weight=self._targets_data["com"].weight,
data=position,
)
def deactivate_com_target(self) -> None:
    """
    Deactivate the CoM target.

    Raises:
        RuntimeError: If the CoM target is not active.
    """
    if not self._ik.isCOMTargetActive():
        raise RuntimeError("Constraint on CoM not active")

    self._ik.deactivateCOMTarget()
# =============
# FRAME METHODS
# =============
def add_frame_transform_constraint(
    self, frame_name: str, position: np.ndarray, quaternion: np.ndarray
) -> None:
    """
    Constrain a frame to keep a fixed transform.

    Args:
        frame_name: The name of the frame to constrain.
        position: The constrained position as a 3D array.
        quaternion: The constrained orientation as a quaternion array.

    Raises:
        RuntimeError: If the constraint cannot be added.
    """
    # Convert the pose to an iDynTree transform
    world_H_frame = rbd.idyntree.numpy.FromNumPy.to_idyntree_transform(
        position=position, quaternion=quaternion
    )

    # Register the constraint in the IK backend
    if not self._ik.addFrameConstraint(frame_name, world_H_frame):
        raise RuntimeError(f"Failed to add constraint on frame '{frame_name}'")
def add_frame_position_constraint(
    self, frame_name: str, position: np.ndarray
) -> None:
    """
    Constrain the position of a frame.

    Args:
        frame_name: The name of the frame to constrain.
        position: The constrained position as a 3D array.

    Raises:
        RuntimeError: If the constraint cannot be added.
    """
    # Convert the position to an iDynTree object
    frame_position = rbd.idyntree.numpy.FromNumPy.to_idyntree_position(
        position=position
    )

    # Register the constraint in the IK backend
    if not self._ik.addFramePositionConstraint(frame_name, frame_position):
        raise RuntimeError(f"Failed to add constraint on frame '{frame_name}'")
def add_frame_rotation_constraint(
    self, frame_name: str, quaternion: np.ndarray
) -> None:
    """
    Constrain the orientation of a frame.

    Args:
        frame_name: The name of the frame to constrain.
        quaternion: The constrained orientation as a quaternion array.

    Raises:
        RuntimeError: If the constraint cannot be added.
    """
    # Convert the quaternion to an iDynTree rotation
    # (the original comment said "position" here, which was a copy-paste slip)
    frame_rotation = rbd.idyntree.numpy.FromNumPy.to_idyntree_rotation(
        quaternion=quaternion
    )

    # Register the constraint in the IK backend
    if not self._ik.addFrameRotationConstraint(frame_name, frame_rotation):
        raise RuntimeError(f"Failed to add constraint on frame '{frame_name}'")
def update_frame_transform_constraint(
    self, frame_name: str, position: np.ndarray, quaternion: np.ndarray
) -> None:
    """
    Update an active frame transform constraint.

    Args:
        frame_name: The name of the constrained frame.
        position: The new constrained position as a 3D array.
        quaternion: The new constrained orientation as a quaternion array.

    Raises:
        RuntimeError: If the constraint is not active or cannot be updated.
    """
    if not self._ik.isFrameConstraintActive(frame_name):
        raise RuntimeError(f"Constraint on frame '{frame_name}' not active")

    # Convert the new pose to an iDynTree transform
    world_H_frame = rbd.idyntree.numpy.FromNumPy.to_idyntree_transform(
        position=position, quaternion=quaternion
    )

    # The update is performed through activateFrameConstraint on the backend
    if not self._ik.activateFrameConstraint(frame_name, world_H_frame):
        raise RuntimeError(f"Failed to update constraint on frame '{frame_name}'")
def deactivate_frame_constraint(self, frame_name: str) -> None:
    """
    Deactivate an active frame constraint.

    Args:
        frame_name: The name of the constrained frame.

    Raises:
        RuntimeError: If the constraint is not active or deactivation fails.
    """
    if not self._ik.isFrameConstraintActive(frame_name):
        raise RuntimeError(f"Constraint on frame '{frame_name}' not active")

    if not self._ik.deactivateFrameConstraint(frame_name):
        raise RuntimeError(f"Failed to deactivate constraint on frame '{frame_name}'")
# ===================
# IK SOLUTION METHODS
# ===================
def solve(self) -> None:
    """
    Run the IK optimization on the active targets and constraints.

    Raises:
        RuntimeError: If the optimization fails.
    """
    if not self._ik.solve():
        raise RuntimeError("Failed to solve IK")

    # Use the solution just found to warm start the next solver call
    self._warm_start_with_last_solution()
def warm_start_from(
    self, full_solution: IKSolution = None, reduced_solution: IKSolution = None
) -> None:
    """
    Initialize the solver with an existing solution.

    Exactly one of the two arguments must be passed: either a solution
    expressed in the full model serialization or one expressed in the
    reduced (considered joints) serialization.

    Args:
        full_solution: A solution matching the full model joints.
        reduced_solution: A solution matching the considered joints.

    Raises:
        ValueError: If none or both solutions are passed.
        RuntimeError: If the solution size does not match, or if the
            backend fails to set the initial condition.
    """
    # Exactly one of the two solutions must be provided
    if (full_solution is None) == (reduced_solution is None):
        raise ValueError("You have to specify either a full or a reduced solution")

    if reduced_solution is not None:
        if reduced_solution.joint_configuration.size != len(
            self._considered_joints
        ):
            raise RuntimeError(
                "The joint configuration does not match the number of considered joints"
            )
        solution = reduced_solution
    else:
        if full_solution.joint_configuration.size != len(
            self._joint_serialization
        ):
            raise RuntimeError(
                "The joint configuration does not match the number of model joints"
            )
        solution = full_solution

    # Convert the base pose and the joint configuration to iDynTree objects
    base_pose = rbd.idyntree.numpy.FromNumPy.to_idyntree_transform(
        position=solution.base_position, quaternion=solution.base_quaternion
    )
    joints = rbd.idyntree.numpy.FromNumPy.to_idyntree_dyn_vector(
        array=solution.joint_configuration
    )

    # Warm start the solver
    if not self._ik.setFullJointsInitialCondition(base_pose, joints):
        raise RuntimeError("Failed to warm start the IK solver")
# =======
# GETTERS
# =======
def get_base_frame(self) -> str:
    """Return the name of the frame currently used as floating base."""
    return self._base_frame
def get_available_target_names(self) -> List[str]:
    """
    Return the names of all the frames that could be used as targets.

    Note:
        Frames are modeled as fake links, therefore they appear among the
        links of the reduced model and are valid targets too.
    """
    model = self._ik.reducedModel()

    return [model.getLinkName(index) for index in range(model.getNrOfLinks())]
def get_active_target_names(self, target_type: TargetType = None) -> List[str]:
    """
    Return the names of the currently active targets.

    Args:
        target_type: Optional filter; when passed, only targets of this
            type are returned.
    """
    if target_type is None:
        return list(self._targets_data.keys())

    return [
        name
        for name, data in self._targets_data.items()
        if data.type == target_type
    ]
def get_target_data(self, target_name: str) -> TargetData:
    """Return the stored data of the target with the given name."""
    return self._targets_data[target_name]
def get_full_solution(self) -> IKSolution:
    """
    Return the last IK solution expressed in the full model serialization.
    """
    # Output buffers filled in-place by the iDynTree bindings
    base_transform = idt.Transform.Identity()
    joint_positions = idt.VectorDynSize(self._ik.fullModel().getNrOfJoints())
    assert len(joint_positions) == len(self._joint_serialization)

    # Read the solution from the solver
    self._ik.getFullJointsSolution(base_transform, joint_positions)

    # Convert the buffers to NumPy objects and pack the solution
    return IKSolution(
        base_position=base_transform.getPosition().toNumPy(),
        base_quaternion=base_transform.getRotation().asQuaternion().toNumPy(),
        joint_configuration=joint_positions.toNumPy(),
    )
def get_reduced_solution(self) -> IKSolution:
    """
    Return the last IK solution expressed in the considered joints serialization.
    """
    # Output buffers filled in-place by the iDynTree bindings
    base_transform = idt.Transform.Identity()
    joint_positions = idt.VectorDynSize(self._ik.reducedModel().getNrOfJoints())
    assert len(joint_positions) == len(self._considered_joints)

    # Read the solution from the solver
    self._ik.getReducedSolution(base_transform, joint_positions)

    # Convert the buffers to NumPy objects and pack the solution
    return IKSolution(
        base_position=base_transform.getPosition().toNumPy(),
        base_quaternion=base_transform.getRotation().asQuaternion().toNumPy(),
        joint_configuration=joint_positions.toNumPy(),
    )
# ===============
# PRIVATE METHODS
# ===============
@staticmethod
def _get_model_loader(
    urdf: str, joint_serialization: List[str] = None
) -> idt.ModelLoader:
    """
    Load a URDF file into an iDynTree ModelLoader.

    Args:
        urdf: The path of the URDF file.
        joint_serialization: Optional subset of joints to keep; when passed,
            a reduced model is loaded.

    Returns:
        The loader containing the parsed model.

    Raises:
        FileNotFoundError: If the URDF file does not exist.
        RuntimeError: If parsing the model fails.
    """
    if not os.path.exists(urdf):
        raise FileNotFoundError(urdf)

    model_loader = idt.ModelLoader()

    # Load either the reduced or the full model
    if joint_serialization:
        loaded = model_loader.loadReducedModelFromFile(urdf, joint_serialization)
    else:
        loaded = model_loader.loadModelFromFile(urdf)

    if not loaded:
        raise RuntimeError("Failed to load model")

    # Due to some SWIG internal, returning the model contained by the loader
    # does not work as expected
    return model_loader
def _warm_start_with_last_solution(self) -> None:
    """Initialize the next solver call with the most recent full solution."""
    self.warm_start_from(full_solution=self.get_full_solution())
================================================
FILE: python/gym_ignition/rbd/idyntree/kindyncomputations.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import List, Tuple
import idyntree.bindings as idt
import numpy as np
from gym_ignition.rbd import conversions
from gym_ignition.rbd.idyntree import numpy
from scenario import core as scenario_core
from .helpers import FrameVelocityRepresentation, iDynTreeHelpers
class KinDynComputations:
    """
    High-level wrapper of the iDynTree KinDynComputations class.

    It exposes kinematics and dynamics quantities of a floating-base model
    (transforms, Jacobians, mass matrix, bias forces, momenta, CoM data)
    converting the iDynTree SWIG types to NumPy objects.

    Args:
        model_file: The path of the model file.
        considered_joints: Optional subset of joints to consider.
            If None, all the joints of the model are used.
        world_gravity: The gravity vector expressed in the world frame.
        velocity_representation: The representation used for frame velocities.
    """

    def __init__(
        self,
        model_file: str,
        considered_joints: List[str] = None,
        world_gravity: np.ndarray = np.array([0, 0, -9.806]),
        velocity_representation: FrameVelocityRepresentation = FrameVelocityRepresentation.MIXED_REPRESENTATION,
    ) -> None:
        self.velocity_representation = velocity_representation
        self.kindyn = iDynTreeHelpers.get_kindyncomputations(
            model_file, considered_joints, velocity_representation
        )
        # Copy the gravity so the (shared) mutable default array is never aliased
        self.world_gravity = np.array(world_gravity)
        self.dofs = self.kindyn.getNrOfDegreesOfFreedom()
        # When no subset is passed, consider all the joints of the model
        if considered_joints is None:
            model: idt.Model = self.kindyn.model()
            all_joints = [model.getJointName(i) for i in range(model.getNrOfJoints())]
            self._considered_joints = all_joints
        else:
            self._considered_joints = considered_joints

    def joint_serialization(self) -> List[str]:
        """Return the ordered names of the considered joints."""
        return self._considered_joints

    def set_robot_state(
        self,
        s: np.ndarray,
        ds: np.ndarray,
        world_H_base: np.ndarray = np.eye(4),
        base_velocity: np.ndarray = np.zeros(6),
        world_gravity: np.ndarray = None,
    ) -> None:
        """
        Set the robot state used by all the other queries of this class.

        Args:
            s: The joint positions (one entry per DoF).
            ds: The joint velocities (one entry per DoF).
            world_H_base: The 4x4 homogeneous transform of the base frame.
            base_velocity: The 6D base velocity as (linear, angular).
            world_gravity: Optional gravity override; defaults to the vector
                passed to the constructor.

        Raises:
            ValueError: If any input has a wrong size or shape.
            RuntimeError: If iDynTree fails to set the state.
        """
        gravity = world_gravity if world_gravity is not None else self.world_gravity
        if s.size != self.dofs:
            raise ValueError(s)
        if ds.size != self.dofs:
            raise ValueError(ds)
        if gravity.size != 3:
            raise ValueError(gravity)
        if world_H_base.shape != (4, 4):
            raise ValueError(world_H_base)
        if base_velocity.size != 6:
            raise ValueError(base_velocity)
        # Convert the NumPy inputs to the iDynTree types expected by the bindings
        s_idyntree = numpy.FromNumPy.to_idyntree_dyn_vector(array=s)
        ds_idyntree = numpy.FromNumPy.to_idyntree_dyn_vector(array=ds)
        world_gravity_idyntree = numpy.FromNumPy.to_idyntree_fixed_vector(array=gravity)
        world_H_base_idyntree = numpy.FromNumPy.to_idyntree_transform(
            position=world_H_base[0:3, 3], rotation=world_H_base[0:3, 0:3]
        )
        base_velocity_idyntree = numpy.FromNumPy.to_idyntree_twist(
            linear_velocity=base_velocity[0:3], angular_velocity=base_velocity[3:6]
        )
        ok_state = self.kindyn.setRobotState(
            world_H_base_idyntree,
            s_idyntree,
            base_velocity_idyntree,
            ds_idyntree,
            world_gravity_idyntree,
        )
        if not ok_state:
            raise RuntimeError("Failed to set the robot state")

    def set_robot_state_from_model(
        self, model: scenario_core.Model, world_gravity: np.ndarray = None
    ) -> None:
        """
        Set the robot state reading it from a scenario Model object.

        The base velocity is read in the representation (mixed or body-fixed)
        matching how this object was configured.

        Args:
            model: The scenario model to read the state from.
            world_gravity: Optional gravity override.

        Raises:
            RuntimeError: If the configured velocity representation is
                INERTIAL_FIXED_REPRESENTATION (not yet supported).
        """
        s = np.array(model.joint_positions(self.joint_serialization()))
        ds = np.array(model.joint_velocities(self.joint_serialization()))
        world_o_base = np.array(model.base_position())
        world_quat_base = np.array(model.base_orientation())
        # Velocity representations
        body = FrameVelocityRepresentation.BODY_FIXED_REPRESENTATION.to_idyntree()
        mixed = FrameVelocityRepresentation.MIXED_REPRESENTATION.to_idyntree()
        # Read the base velocity in the representation configured on kindyn
        if self.kindyn.getFrameVelocityRepresentation() is mixed:
            base_linear_velocity = np.array(model.base_world_linear_velocity())
            base_angular_velocity = np.array(model.base_world_angular_velocity())
        elif self.kindyn.getFrameVelocityRepresentation() is body:
            base_linear_velocity = np.array(model.base_body_linear_velocity())
            base_angular_velocity = np.array(model.base_body_angular_velocity())
        else:
            raise RuntimeError("INERTIAL_FIXED_REPRESENTATION not yet supported")
        # Pack the data structures
        world_H_base = conversions.Transform.from_position_and_quaternion(
            position=world_o_base, quaternion=world_quat_base
        )
        base_velocity_6d = np.concatenate((base_linear_velocity, base_angular_velocity))
        self.set_robot_state(
            s=s,
            ds=ds,
            world_H_base=world_H_base,
            base_velocity=base_velocity_6d,
            world_gravity=world_gravity,
        )

    def get_floating_base(self) -> str:
        """Return the name of the frame used as floating base."""
        return self.kindyn.getFloatingBase()

    def get_joint_positions(self) -> np.ndarray:
        """Return the joint positions of the last set robot state."""
        vector = idt.VectorDynSize()
        if not self.kindyn.getJointPos(vector):
            raise RuntimeError("Failed to extract joint positions")
        return vector.toNumPy()

    def get_joint_velocities(self) -> np.ndarray:
        """Return the joint velocities of the last set robot state."""
        vector = idt.VectorDynSize()
        if not self.kindyn.getJointVel(vector):
            raise RuntimeError("Failed to extract joint velocities")
        return vector.toNumPy()

    def get_model_velocity(self) -> np.ndarray:
        """Return the full model velocity (base 6D velocity + joint velocities)."""
        nu = idt.VectorDynSize()
        if not self.kindyn.getModelVel(nu):
            raise RuntimeError("Failed to get the model velocity")
        return nu.toNumPy()

    def get_model_position(self) -> np.ndarray:
        """
        Return the model position as base position, base RPY orientation,
        and joint positions, concatenated in a single array.
        """
        W_H_B: idt.Transform = self.kindyn.getWorldBaseTransform()
        rpy: idt.Vector3 = self.kindyn.getWorldBaseTransform().getRotation().asRPY()
        q_base = np.concatenate([W_H_B.getPosition().toNumPy(), rpy.toNumPy()])
        return np.concatenate([q_base, self.get_joint_positions()])

    def get_world_transform(self, frame_name: str) -> np.ndarray:
        """
        Return the transform of a frame wrt the world, as a 4x4 matrix.

        Raises:
            ValueError: If the frame does not exist.
        """
        if self.kindyn.getFrameIndex(frame_name) < 0:
            raise ValueError(f"Frame '{frame_name}' does not exist")
        H = self.kindyn.getWorldTransform(frame_name)
        return numpy.ToNumPy.from_idyntree_transform(transform=H)

    def get_relative_transform(
        self, ref_frame_name: str, frame_name: str
    ) -> np.ndarray:
        """
        Return the transform of frame_name wrt ref_frame_name, as a 4x4 matrix.

        Raises:
            ValueError: If any of the frames does not exist.
        """
        if self.kindyn.getFrameIndex(ref_frame_name) < 0:
            raise ValueError(f"Frame '{ref_frame_name}' does not exist")
        if self.kindyn.getFrameIndex(frame_name) < 0:
            raise ValueError(f"Frame '{frame_name}' does not exist")
        ref_H_other: idt.Transform = self.kindyn.getRelativeTransform(
            ref_frame_name, frame_name
        )
        return ref_H_other.asHomogeneousTransform().toNumPy()

    def get_world_base_transform(self) -> np.ndarray:
        """Return the transform of the base frame wrt the world, as a 4x4 matrix."""
        W_H_B: idt.Transform = self.kindyn.getWorldBaseTransform()
        return W_H_B.asHomogeneousTransform().toNumPy()

    def get_relative_transform_explicit(
        self,
        ref_frame_origin: str,
        ref_frame_orientation: str,
        frame_origin: str,
        frame_orientation: str,
    ) -> np.ndarray:
        """
        Return the relative transform AB_H_CD between two hybrid frames.

        The reference frame has the origin of ref_frame_origin (A) and the
        orientation of ref_frame_orientation (B); the target frame has the
        origin of frame_origin (C) and the orientation of frame_orientation
        (D). The special name "world" can be passed as either orientation to
        use the world orientation.

        Raises:
            ValueError: If any non-"world" frame does not exist.
        """
        for frame in {
            ref_frame_origin,
            ref_frame_orientation,
            frame_origin,
            frame_orientation,
        }:
            if frame != "world" and self.kindyn.getFrameIndex(frame) < 0:
                raise ValueError(f"Frame '{frame}' does not exist")
        # Compute the transform AB_H_CD
        frameA = ref_frame_origin
        frameB = ref_frame_orientation
        frameC = frame_origin
        frameD = frame_orientation
        # "world" orientations are first replaced with the frame's own
        # orientation, and corrected afterwards with rotation-only transforms
        if frame_orientation == "world":
            frameD = frameC
        if ref_frame_orientation == "world":
            frameB = frameA
        frameA_index = self.kindyn.getFrameIndex(frameName=frameA)
        frameB_index = self.kindyn.getFrameIndex(frameName=frameB)
        frameC_index = self.kindyn.getFrameIndex(frameName=frameC)
        frameD_index = self.kindyn.getFrameIndex(frameName=frameD)
        ref_H_other: idt.Transform = self.kindyn.getRelativeTransformExplicit(
            frameA_index, frameB_index, frameC_index, frameD_index
        )
        AB_H_CD = ref_H_other
        # Correct the target orientation: post-multiply by the rotation-only
        # transform from the world-oriented frame to C (position zeroed out)
        if frame_orientation == "world":
            AB_H_C = AB_H_CD
            C_H_CW: idt.Transform = self.kindyn.getWorldTransform(frameC).inverse()
            C_H_CW.setPosition(idt.Position(0, 0, 0))
            AB_H_CW = AB_H_C * C_H_CW
            AB_H_CD = AB_H_CW
        # Correct the reference orientation: pre-multiply by the rotation-only
        # transform from A to the world-oriented frame (position zeroed out)
        if ref_frame_orientation == "world":
            A_H_CD = AB_H_CD
            AW_H_A: idt.Transform = self.kindyn.getWorldTransform(frameA)
            AW_H_A.setPosition(idt.Position(0, 0, 0))
            AW_H_CD = AW_H_A * A_H_CD
            AB_H_CD = AW_H_CD
        return AB_H_CD.asHomogeneousTransform().toNumPy()

    def get_relative_adjoint_wrench_transform_explicit(
        self,
        ref_frame_origin: str,
        ref_frame_orientation: str,
        frame_origin: str,
        frame_orientation: str,
    ) -> np.ndarray:
        """
        Return the 6x6 adjoint wrench transform built from the relative
        transform computed by get_relative_transform_explicit.
        """
        AB_H_CD = self.get_relative_transform_explicit(
            ref_frame_origin=ref_frame_origin,
            ref_frame_orientation=ref_frame_orientation,
            frame_origin=frame_origin,
            frame_orientation=frame_orientation,
        )
        return (
            numpy.FromNumPy.to_idyntree_transform(
                position=AB_H_CD[0:3, 3], rotation=AB_H_CD[0:3, 0:3]
            )
            .asAdjointTransformWrench()
            .toNumPy()
        )

    def get_mass_matrix(self) -> np.ndarray:
        """Return the free-floating mass matrix."""
        M = idt.MatrixDynSize()
        if not self.kindyn.getFreeFloatingMassMatrix(M):
            raise RuntimeError("Failed to get the free floating mass matrix")
        return M.toNumPy()

    def get_generalized_gravity_forces(self) -> np.ndarray:
        """Return the generalized gravity forces (base wrench + joint torques)."""
        g = idt.FreeFloatingGeneralizedTorques(self.kindyn.model())
        if not self.kindyn.generalizedGravityForces(g):
            raise RuntimeError("Failed to get the generalized gravity forces")
        base_wrench: idt.Wrench = g.baseWrench()
        joint_torques: idt.JointDOFsDoubleArray = g.jointTorques()
        return np.concatenate(
            [base_wrench.toNumPy().flatten(), joint_torques.toNumPy().flatten()]
        )

    def get_bias_forces(self) -> np.ndarray:
        """Return the generalized bias forces (base wrench + joint torques)."""
        h = idt.FreeFloatingGeneralizedTorques(self.kindyn.model())
        if not self.kindyn.generalizedBiasForces(h):
            raise RuntimeError("Failed to get the generalized bias forces")
        base_wrench: idt.Wrench = h.baseWrench()
        joint_torques: idt.JointDOFsDoubleArray = h.jointTorques()
        return np.concatenate(
            [base_wrench.toNumPy().flatten(), joint_torques.toNumPy().flatten()]
        )

    def get_momentum(self) -> Tuple[np.ndarray, np.ndarray]:
        """Return the (linear, angular) momentum of the model."""
        spatial_momentum = self.kindyn.getLinearAngularMomentum()
        momentum_6d = spatial_momentum.asVector().toNumPy()
        linear, angular = np.split(momentum_6d, 2)
        return linear, angular

    def get_centroidal_momentum(self) -> Tuple[np.ndarray, np.ndarray]:
        """Return the (linear, angular) centroidal momentum of the model."""
        spatial_momentum = self.kindyn.getCentroidalTotalMomentum()
        momentum_6d = spatial_momentum.asVector().toNumPy()
        linear, angular = np.split(momentum_6d, 2)
        return linear, angular

    def get_com_position(self) -> np.ndarray:
        """Return the CoM position expressed in the world frame."""
        W_p_com = self.kindyn.getCenterOfMassPosition()
        return W_p_com.toNumPy()

    def get_com_velocity(self) -> np.ndarray:
        """
        Return the CoM velocity in the configured velocity representation.

        Raises:
            RuntimeError: If the configured representation is
                INERTIAL_FIXED_REPRESENTATION (not yet supported).
        """
        # Velocity representations
        body = FrameVelocityRepresentation.BODY_FIXED_REPRESENTATION.to_idyntree()
        mixed = FrameVelocityRepresentation.MIXED_REPRESENTATION.to_idyntree()
        if self.kindyn.getFrameVelocityRepresentation() is mixed:
            # The method always returns the MIXED velocity of the CoM, regardless of
            # how KinDynComputations was configured.
            v_com = self.kindyn.getCenterOfMassVelocity()
            return v_com.toNumPy()
        elif self.kindyn.getFrameVelocityRepresentation() is body:
            # Get the transform of the base frame
            W_H_B = self.kindyn.getWorldBaseTransform()
            _, W_R_B = numpy.ToNumPy.from_idyntree_transform(
                transform=W_H_B, split=True
            )
            # Get the rotation between world and base frame
            B_R_W = np.linalg.inv(W_R_B)
            # Convert linear velocity from MIXED to BODY representation
            BW_v_com = self.kindyn.getCenterOfMassVelocity().toNumPy()
            B_v_com = B_R_W @ BW_v_com
            return B_v_com
        else:
            raise RuntimeError("INERTIAL_FIXED_REPRESENTATION not yet supported")

    def get_average_velocity(self) -> np.ndarray:
        """Return the average 6D velocity of the model."""
        twist: idt.Twist = self.kindyn.getAverageVelocity()
        return twist.toNumPy()

    def get_centroidal_average_velocity(self) -> np.ndarray:
        """Return the centroidal average 6D velocity of the model."""
        twist: idt.Twist = self.kindyn.getCentroidalAverageVelocity()
        return twist.toNumPy()

    def get_frame_jacobian(self, frame_name: str) -> np.ndarray:
        """
        Return the 6x(dofs+6) free-floating Jacobian of a frame.

        Raises:
            ValueError: If the frame does not exist.
        """
        if self.kindyn.getFrameIndex(frame_name) < 0:
            raise ValueError(f"Frame '{frame_name}' does not exist")
        J = idt.MatrixDynSize(6, self.dofs + 6)
        if not self.kindyn.getFrameFreeFloatingJacobian(frame_name, J):
            raise RuntimeError("Failed to get the frame free floating jacobian")
        return J.toNumPy()

    def get_linear_angular_momentum_jacobian(self) -> np.ndarray:
        """Return the linear and angular momentum Jacobian."""
        J_mom = idt.MatrixDynSize()
        if not self.kindyn.getLinearAngularMomentumJacobian(J_mom):
            raise RuntimeError("Failed to get the momentum jacobian")
        return J_mom.toNumPy()

    def get_centroidal_total_momentum_jacobian(self) -> np.ndarray:
        """Return the centroidal total momentum Jacobian."""
        J_cmm = idt.MatrixDynSize()
        if not self.kindyn.getCentroidalTotalMomentumJacobian(J_cmm):
            raise RuntimeError("Failed to get the centroidal total momentum jacobian")
        return J_cmm.toNumPy()

    def get_average_velocity_jacobian(self) -> np.ndarray:
        """Return the average velocity Jacobian."""
        J_avg_vel = idt.MatrixDynSize()
        if not self.kindyn.getAverageVelocityJacobian(J_avg_vel):
            raise RuntimeError("Failed to get the average velocity jacobian")
        return J_avg_vel.toNumPy()

    def get_centroidal_average_velocity_jacobian(self) -> np.ndarray:
        """Return the centroidal average velocity Jacobian."""
        J_cen_avg_vel = idt.MatrixDynSize()
        if not self.kindyn.getCentroidalAverageVelocityJacobian(J_cen_avg_vel):
            raise RuntimeError("Failed to get the average velocity jacobian")
        return J_cen_avg_vel.toNumPy()

    def get_frame_bias_acc(self, frame_name: str) -> np.ndarray:
        """
        Return the bias acceleration (dJ * nu) of a frame.

        Raises:
            ValueError: If the frame does not exist.
        """
        if self.kindyn.getFrameIndex(frame_name) < 0:
            raise ValueError(f"Frame '{frame_name}' does not exist")
        dJ_nu = self.kindyn.getFrameBiasAcc(frame_name)
        return dJ_nu.toNumPy()

    def get_com_bias_acc(self) -> np.ndarray:
        """Return the bias acceleration (dJ * nu) of the CoM."""
        dJ_nu = self.kindyn.getCenterOfMassBiasAcc()
        return dJ_nu.toNumPy()
================================================
FILE: python/gym_ignition/rbd/idyntree/numpy.py
================================================
import abc
from typing import Tuple, Union
import idyntree.bindings as idt
import numpy as np
from gym_ignition import rbd
class FromNumPy(abc.ABC):
    """Static converters from NumPy arrays to iDynTree objects."""

    @staticmethod
    def to_idyntree_dyn_vector(array: np.ndarray) -> idt.VectorDynSize:
        """Convert a 1D array to an iDynTree dynamic-size vector."""

        return idt.VectorDynSize_FromPython(array)

    @staticmethod
    def to_idyntree_fixed_vector(array: np.ndarray):
        """
        Convert a 1D array of size 3, 4, or 6 to the matching iDynTree
        fixed-size vector.

        Raises:
            ValueError: If the array size is not supported.
        """

        # Map the supported sizes to the corresponding iDynTree class
        size_to_class = {3: idt.Vector3, 4: idt.Vector4, 6: idt.Vector6}

        if array.size not in size_to_class:
            raise ValueError(array)

        return size_to_class[array.size]().FromPython(array)

    @staticmethod
    def to_idyntree_position(position: np.ndarray) -> idt.Position:
        """
        Convert a 3-element array to an iDynTree Position.

        Raises:
            ValueError: If the array does not have 3 elements.
        """

        if position.size != 3:
            raise ValueError("The position array must have 3 elements")

        return idt.Position(position[0], position[1], position[2])

    @staticmethod
    def to_idyntree_rotation(quaternion: np.ndarray) -> idt.Rotation:
        """
        Convert a 4-element quaternion array to an iDynTree Rotation.

        Raises:
            ValueError: If the array does not have 4 elements.
        """

        if quaternion.size != 4:
            raise ValueError("The quaternion array must have 4 elements")

        quaternion_idt = idt.Vector4_FromPython(quaternion)

        rotation = idt.Rotation()
        rotation.fromQuaternion(quaternion_idt)

        return rotation

    @staticmethod
    def to_idyntree_transform(
        position: np.ndarray, quaternion: np.ndarray = None, rotation: np.ndarray = None
    ) -> idt.Transform:
        """
        Build an iDynTree Transform from a position and either a quaternion
        or a rotation matrix.

        Raises:
            ValueError: If neither or both of quaternion and rotation are given.
        """

        # Exactly one of the two orientation parametrizations must be given.
        # Both branches of the original check raise the same message.
        if quaternion is None and rotation is None:
            raise ValueError("You must pass either a quaternion or a rotation")
        if quaternion is not None and rotation is not None:
            raise ValueError("You must pass either a quaternion or a rotation")

        if rotation is not None:
            quaternion = rbd.conversions.Quaternion.from_matrix(matrix=rotation)

        transform = idt.Transform()
        transform.setPosition(FromNumPy.to_idyntree_position(position=position))
        transform.setRotation(FromNumPy.to_idyntree_rotation(quaternion=quaternion))

        return transform

    @staticmethod
    def to_idyntree_twist(
        linear_velocity: np.ndarray, angular_velocity: np.ndarray
    ) -> idt.Twist:
        """
        Build an iDynTree Twist from 3D linear and angular velocity arrays.

        Raises:
            ValueError: If either input does not have 3 elements.
        """

        if linear_velocity.size != 3:
            raise ValueError("The linear velocity must have 3 elements")

        if angular_velocity.size != 3:
            raise ValueError("The angular velocity must have 3 elements")

        velocity_6d = np.concatenate((linear_velocity, angular_velocity))

        return idt.Twist_FromPython(velocity_6d)
class ToNumPy(abc.ABC):
    """Static converters from iDynTree objects to NumPy arrays."""

    @staticmethod
    def from_idyntree_vector(vector) -> np.ndarray:
        """
        Convert an iDynTree vector or matrix object to a NumPy array.

        Args:
            vector: A fixed-size or dynamic-size iDynTree vector or matrix.

        Returns:
            A NumPy array with the same data.

        Raises:
            ValueError: If the input is not a supported iDynTree type.
        """

        input_types = (
            idt.Vector3,
            idt.Vector4,
            idt.Vector6,
            idt.VectorDynSize,
            idt.Matrix3x3,
            idt.Matrix4x4,
            idt.Matrix6x6,
            idt.MatrixDynSize,
        )

        if not isinstance(vector, input_types):
            raise ValueError(vector)

        return np.array(vector.toNumPy())

    @staticmethod
    def from_idyntree_transform(
        transform: idt.Transform, split: bool = False
    ) -> Union[Tuple[np.ndarray, np.ndarray], np.ndarray]:
        """
        Convert an iDynTree Transform to NumPy.

        Args:
            transform: The iDynTree transform to convert.
            split: If True, return the (position, rotation) tuple instead of
                the 4x4 homogeneous matrix.

        Returns:
            Either the (3,) position and (3, 3) rotation as a tuple, or the
            full (4, 4) homogeneous transform.

        Raises:
            ValueError: If the input is not an iDynTree Transform.
        """

        if not isinstance(transform, idt.Transform):
            raise ValueError(transform)

        rotation = transform.getRotation()
        position = transform.getPosition()

        if split:
            return position.toNumPy(), rotation.toNumPy()

        # Start from the identity so that the bottom row is [0, 0, 0, 1].
        # The previous implementation used np.zeros, which returned an
        # invalid homogeneous transform with a zeroed bottom-right element.
        H = np.eye(4)
        H[0:3, 3] = position.toNumPy()
        H[0:3, 0:3] = rotation.toNumPy()
        return H
================================================
FILE: python/gym_ignition/rbd/utils.py
================================================
# Copyright (C) 2021 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import numpy as np
def wedge(vector3: np.ndarray) -> np.ndarray:
    """
    Convert a 3D vector to a skew-symmetric matrix.

    Args:
        vector3: The 3D vector defining the matrix coefficients.

    Returns:
        The skew-symmetric matrix whose elements are created from the input vector.

    Note:
        The wedge operator can be useful to compute the cross product of 3D vectors:
        :math:`v_1 \\times v_2 = v_1^\\wedge v_2`.
    """

    if vector3.size != 3:
        raise ValueError(vector3)

    # Build the skew-symmetric matrix row by row
    return np.array(
        [
            [0.0, -vector3[2], vector3[1]],
            [vector3[2], 0.0, -vector3[0]],
            [-vector3[1], vector3[0], 0.0],
        ]
    )
def vee(matrix3x3: np.ndarray) -> np.ndarray:
    """
    Convert a 3x3 matrix to a 3D vector with the components of its skew-symmetric part.

    Args:
        matrix3x3: The input 3x3 matrix. If present, its symmetric part is removed.

    Returns:
        The 3D vector defining the skew-symmetric matrix.

    Note:
        This is the inverse operator of :py:func:`wedge`.
    """

    if matrix3x3.shape != (3, 3):
        raise ValueError(matrix3x3)

    # Drop the symmetric part before reading the off-diagonal entries
    sk = extract_skew(matrix3x3)

    return np.array([sk[2, 1], sk[0, 2], sk[1, 0]])
def extract_skew(matrix: np.ndarray) -> np.ndarray:
    """
    Extract the skew-symmetric part of a square matrix.

    Args:
        matrix: A square matrix.

    Returns:
        The skew-symmetric part of the input matrix.

    Raises:
        ValueError: If the input is not a 2D square matrix.
    """

    # Validate the dimensionality explicitly: a 1D input would otherwise
    # raise an unhelpful IndexError when accessing shape[1]
    if matrix.ndim != 2 or matrix.shape[0] != matrix.shape[1]:
        raise ValueError(matrix)

    return 0.5 * (matrix - matrix.T)
def extract_symm(matrix: np.ndarray) -> np.ndarray:
    """
    Extract the symmetric part of a square matrix.

    Args:
        matrix: A square matrix.

    Returns:
        The symmetric part of the input matrix.

    Raises:
        ValueError: If the input is not a 2D square matrix.
    """

    # Validate the dimensionality explicitly: a 1D input would otherwise
    # raise an unhelpful IndexError when accessing shape[1]
    if matrix.ndim != 2 or matrix.shape[0] != matrix.shape[1]:
        raise ValueError(matrix)

    return 0.5 * (matrix + matrix.T)
================================================
FILE: python/gym_ignition/runtimes/__init__.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import gazebo_runtime, realtime_runtime
================================================
FILE: python/gym_ignition/runtimes/gazebo_runtime.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import Optional
import gym_ignition_models
from gym_ignition import base
from gym_ignition.base import runtime
from gym_ignition.utils import logger
from gym_ignition.utils.typing import *
from scenario import gazebo as scenario
class GazeboRuntime(runtime.Runtime):
    """
    Implementation of :py:class:`~gym_ignition.base.runtime.Runtime` for the Ignition
    Gazebo simulator.

    Args:
        task_cls: The class of the handled task.
        agent_rate: The rate at which the environment is called.
        physics_rate: The rate of the physics engine.
        real_time_factor: The desired RTF of the simulation.
        physics_engine: *(optional)* The physics engine to use.
        world: *(optional)* The path to an SDF world file. The world should not contain
            any physics plugin.

    Note:
        Physics randomization is still experimental and it could change in the future.
        Physics is loaded only once, when the simulator starts. In order to change the
        physics, a new simulator should be created.
    """

    metadata = {"render.modes": ["human"]}

    def __init__(
        self,
        task_cls: type,
        agent_rate: float,
        physics_rate: float,
        real_time_factor: float,
        physics_engine=scenario.PhysicsEngine_dart,
        world: str = None,
        **kwargs,
    ):
        # When debug logging is enabled, dump the constructor arguments.
        # NOTE(review): `gym` and `np` are brought into scope by the wildcard
        # import from gym_ignition.utils.typing — confirm if that changes.
        if gym.logger.MIN_LEVEL <= gym.logger.DEBUG:
            import inspect

            frame = inspect.currentframe()
            args, _, _, values = inspect.getargvalues(frame)
            gym.logger.debug(f"{dict({arg: values[arg] for arg in args})}")

        # Gazebo attributes
        self._gazebo: Optional[scenario.GazeboSimulator] = None
        self._physics_rate = physics_rate
        self._real_time_factor = real_time_factor

        # Store the desired physics engine
        self._physics_engine = physics_engine

        # World attributes
        self._world = None
        self._world_sdf = world
        self._world_name = None

        # Create the Task object
        task = task_cls(agent_rate=agent_rate, **kwargs)

        if not isinstance(task, base.task.Task):
            raise RuntimeError("The task is not compatible with the runtime")

        # Wrap the task with the runtime
        super().__init__(task=task, agent_rate=agent_rate)

        # Trigger the initialization of the simulator and the world
        _ = self.gazebo

        # Initialize the spaces
        self.action_space, self.observation_space = self.task.create_spaces()

        # Store the spaces also in the task
        self.task.action_space = self.action_space
        self.task.observation_space = self.observation_space

        # Seed the environment
        self.seed()

    # =================
    # Runtime interface
    # =================

    def timestamp(self) -> float:
        """Return the current simulated time of the world."""
        return self.world.time()

    # =================
    # gym.Env interface
    # =================

    def step(self, action: Action) -> State:
        """
        Set the action, step the simulator, and return the resulting state.

        Args:
            action: The action to apply, expected to belong to the action space.

        Returns:
            The (observation, reward, done, info) tuple.
        """
        if not self.action_space.contains(action):
            logger.warn("The action does not belong to the action space")

        # Set the action
        self.task.set_action(action)

        # Step the simulator
        ok_gazebo = self.gazebo.run()
        assert ok_gazebo, "Failed to step gazebo"

        # Get the observation
        observation = self.task.get_observation()
        assert isinstance(observation, np.ndarray)

        if not self.observation_space.contains(observation):
            logger.warn("The observation does not belong to the observation space")

        # Get the reward
        reward = self.task.get_reward()
        assert isinstance(reward, float), "Failed to get the reward"

        # Check termination
        done = self.task.is_done()

        # Get info
        info = self.task.get_info()

        return State((Observation(observation), Reward(reward), Done(done), Info(info)))

    def reset(self) -> Observation:
        """
        Reset the task and return the initial observation.

        A paused simulator step is executed so that the task can read a
        consistent state without advancing the physics.
        """
        # Reset the task
        self.task.reset_task()

        # Execute a paused step
        ok_run = self.gazebo.run(paused=True)

        if not ok_run:
            raise RuntimeError("Failed to run Gazebo")

        # Get the observation
        observation = self.task.get_observation()
        assert isinstance(observation, np.ndarray)

        if not self.observation_space.contains(observation):
            logger.warn("The observation does not belong to the observation space")

        return Observation(observation)

    def render(self, mode: str = "human", **kwargs) -> None:
        """
        Render the environment by opening the Gazebo GUI.

        Raises:
            ValueError: If the render mode is not "human".
            RuntimeError: If the GUI fails to open.
        """
        if mode != "human":
            raise ValueError(f"Render mode '{mode}' not supported")

        gui_ok = self.gazebo.gui()

        if not gui_ok:
            raise RuntimeError("Failed to render the environment")

        return

    def close(self) -> None:
        """Close the Gazebo simulator."""
        ok_close = self.gazebo.close()

        if not ok_close:
            raise RuntimeError("Failed to close Gazebo")

    def seed(self, seed: int = None) -> SeedList:
        """
        Seed the task and its spaces.

        Returns:
            The list of seeds used by the environment.
        """
        # This method also seeds the spaces. To create them, the task could use the world.
        # Here we check that it has been created.
        if not self.task.has_world():
            raise RuntimeError("The world has never been created")

        # Seed the task (it will also seed the spaces)
        seed = self.task.seed_task(seed)

        return seed

    # ==============================
    # Properties and Private Methods
    # ==============================

    @property
    def gazebo(self) -> scenario.GazeboSimulator:
        """
        Return the simulator, lazily creating and initializing it on first access.

        The first access also triggers the world property, which inserts the
        world and initializes the simulator.
        """
        if self._gazebo is not None:
            assert self._gazebo.initialized()
            return self._gazebo

        # Compute the number of physics iteration to execute at every environment step
        num_of_steps_per_run = self._physics_rate / self.agent_rate

        if num_of_steps_per_run != int(num_of_steps_per_run):
            logger.warn(
                "Rounding the number of iterations to {} from the nominal {}".format(
                    int(num_of_steps_per_run), num_of_steps_per_run
                )
            )

        # Create the simulator
        gazebo = scenario.GazeboSimulator(
            1.0 / self._physics_rate, self._real_time_factor, int(num_of_steps_per_run)
        )

        # Store the simulator
        self._gazebo = gazebo

        # Insert the world (it will initialize the simulator)
        _ = self.world
        assert self._gazebo.initialized()

        return self._gazebo

    @property
    def world(self) -> scenario.World:
        """
        Return the world, lazily inserting it into the simulator on first access.

        The first access must happen after the simulator object is created but
        before it is initialized (insertion initializes the simulator).
        """
        if self._world is not None:
            assert self.gazebo.initialized()
            return self._world

        if self._gazebo is None:
            raise RuntimeError("Gazebo has not yet been created")

        if self._gazebo.initialized():
            raise RuntimeError("Gazebo was already initialized, cannot insert world")

        # An empty SDF string selects a default world named "default"
        if self._world_sdf is None:
            self._world_sdf = ""
            self._world_name = "default"

        else:
            self._world_name = scenario.get_world_name_from_sdf(self._world_sdf)

        # Load the world
        ok_world = self._gazebo.insert_world_from_sdf(self._world_sdf, self._world_name)

        if not ok_world:
            raise RuntimeError("Failed to load SDF world")

        # Initialize the simulator
        ok_initialize = self._gazebo.initialize()

        if not ok_initialize:
            raise RuntimeError("Failed to initialize Gazebo")

        if not self._gazebo.initialized():
            raise RuntimeError("Gazebo was not initialized")

        # Get the world
        world = self._gazebo.get_world(self._world_name)

        assert self._world_sdf is not None
        assert self._world_name in self._gazebo.world_names()

        # The default world gets a ground plane inserted programmatically
        if self._world_sdf == "":

            # Insert the ground plane
            ok_ground = world.insert_model(
                gym_ignition_models.get_model_file("ground_plane")
            )

            if not ok_ground:
                raise RuntimeError("Failed to insert the ground plane")

        # Set the world in the task
        self.task.world = world

        # Select the physics engine
        world.set_physics_engine(engine=self._physics_engine)

        # Store the world
        self._world = world

        return self._world
================================================
FILE: python/gym_ignition/runtimes/realtime_runtime.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from gym_ignition.base import runtime, task
from gym_ignition.utils.typing import Action, Done, Info, Observation, State
class RealTimeRuntime(runtime.Runtime):
    """
    Implementation of :py:class:`~gym_ignition.base.runtime.Runtime` for real-time
    execution.

    Args:
        task_cls: The class of the handled task.
        robot_cls: The class of the robot (currently unused).
        agent_rate: The rate at which the environment is called.

    Warning:
        This class is not yet complete.
    """

    def __init__(self, task_cls: type, robot_cls: type, agent_rate: float, **kwargs):
        # Build the environment
        task_object = task_cls(**kwargs)
        assert isinstance(
            task_object, task.Task
        ), "'task_cls' object must inherit from Task"

        super().__init__(task=task_object, agent_rate=agent_rate)

        raise NotImplementedError

    # =================
    # Runtime interface
    # =================

    def timestamp(self) -> float:
        raise NotImplementedError

    # =================
    # gym.Env interface
    # =================

    def step(self, action: Action) -> State:
        """
        Set the action and return the resulting state.

        Warning:
            The real-time stepping logic is not implemented yet.
        """
        # Validate action and robot
        assert self.action_space.contains(action), "%r (%s) invalid" % (
            action,
            type(action),
        )

        # Set the action
        # NOTE(review): this assumes Task.set_action returns a truthy value on
        # success — confirm against the base Task interface
        ok_action = self.task.set_action(action)
        assert ok_action, "Failed to set the action"

        # TODO: realtime step

        # Get the observation
        observation = self.task.get_observation()
        assert self.observation_space.contains(observation), "%r (%s) invalid" % (
            observation,
            type(observation),
        )

        # Get the reward.
        # Check the type instead of truthiness: the previous `assert reward`
        # failed on a legitimate reward of 0.0.
        reward = self.task.get_reward()
        assert isinstance(reward, float), "Failed to get the reward"

        # Check termination
        done = self.task.is_done()

        return State((observation, reward, Done(done), Info({})))

    def reset(self) -> Observation:
        """Return the current observation without resetting any hardware."""
        # Get the observation
        observation = self.task.get_observation()

        return Observation(observation)

    def render(self, mode: str = "human", **kwargs) -> None:
        raise NotImplementedError

    def close(self) -> None:
        raise NotImplementedError
================================================
FILE: python/gym_ignition/scenario/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import model_with_file, model_wrapper
================================================
FILE: python/gym_ignition/scenario/model_with_file.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
class ModelWithFile(abc.ABC):
    """
    Interface of models whose description can be loaded from a file.

    Subclasses must implement :py:meth:`get_model_file`, returning the path of
    the model description on disk.
    """

    def __init__(self):
        super().__init__()

    @classmethod
    @abc.abstractmethod
    def get_model_file(cls) -> str:
        """
        Get the path to the model description file.

        Returns:
            The path of the model file.
        """
        pass
================================================
FILE: python/gym_ignition/scenario/model_wrapper.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from scenario import core as scenario
class ModelWrapper(scenario.Model, abc.ABC):
    """
    Base class that wraps a ``scenario.Model`` instance.

    Attribute lookups that fail on the wrapper are forwarded to the wrapped
    model object, so the wrapper transparently exposes the model's API while
    still being a ``scenario.Model`` for isinstance checks.
    """

    def __init__(self, model: scenario.Model):
        # No need to call scenario.Model.__init__()!
        # The wrapped instance is already fully constructed; only the ABC
        # machinery needs initializing here.
        abc.ABC.__init__(self)

        # The wrapped model object that calls are forwarded to
        self.model = model

    def __getattr__(self, name):
        # Invoked only when normal attribute lookup fails on the wrapper:
        # delegate the lookup to the wrapped model
        return getattr(self.model, name)
================================================
FILE: python/gym_ignition/utils/__init__.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import logger, math, misc, resource_finder, scenario
from .typing import *
================================================
FILE: python/gym_ignition/utils/logger.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import contextlib
import warnings
import gym
from gym import logger
from gym.logger import debug, error, info
from gym.utils import colorize
def custom_formatwarning(msg, *args, **kwargs):
    """
    Custom format that overrides :py:func:`warnings.formatwarning`.

    Args:
        msg: The warning message.
        *args: The remaining ``formatwarning`` positional arguments, i.e. the
            warning category, the file name, and the line number, in order.

    Returns:
        The formatted warning string. File and line details are included only
        when the logger verbosity is DEBUG.
    """
    # Compare with == rather than 'is': logger levels are plain integers and
    # identity comparison only works by accident of CPython small-int caching.
    if logger.MIN_LEVEL == logger.DEBUG:
        warning = "{}:{} {}: {}\n".format(args[1], args[2], args[0].__name__, msg)
    else:
        warning = "{}\n".format(msg)

    return warning
def warn(msg: str, *args) -> None:
    """
    Custom definition of :py:func:`gym.logger.warn` function.
    """
    # Suppressed below the WARN verbosity threshold
    if logger.MIN_LEVEL > logger.WARN:
        return

    text = colorize("%s: %s" % ("WARN", msg % args), "yellow")
    warnings.warn(text, stacklevel=2)
# Monkey patch the global warning formatter so that warnings emitted through
# this module are rendered by custom_formatwarning defined above.
warnings.formatwarning = custom_formatwarning
def set_level(level: int, scenario_level: int = None) -> None:
    """
    Set the verbosity level of both :py:mod:`gym` and :py:mod:`gym_ignition`.

    Accepted values:

    - :py:const:`gym.logger.DEBUG` (10)
    - :py:const:`gym.logger.INFO` (20)
    - :py:const:`gym.logger.WARN` (30)
    - :py:const:`gym.logger.ERROR` (40)
    - :py:const:`gym.logger.DISABLED` (50)

    Args:
        level: The desired verbosity level.
        scenario_level: The desired ScenarIO verbosity level (defaults to ``level``).
    """
    # Set the gym verbosity
    logger.set_level(level)

    # ScenarIO is optional at runtime: silently skip it when not importable
    try:
        from scenario import gazebo as scenario
    except ImportError:
        return

    if scenario_level is None:
        scenario_level = level

    # Map the numeric level to the ScenarI/O verbosity enum
    if scenario_level <= logger.DEBUG:
        verbosity = scenario.Verbosity_debug
    elif scenario_level <= logger.INFO:
        verbosity = scenario.Verbosity_info
    elif scenario_level <= logger.WARN:
        verbosity = scenario.Verbosity_warning
    elif scenario_level <= logger.ERROR:
        verbosity = scenario.Verbosity_error
    else:
        verbosity = scenario.Verbosity_suppress_all

    scenario.set_verbosity(verbosity)
@contextlib.contextmanager
def gym_verbosity(level: int):
    """
    Context manager that temporarily changes the gym logger verbosity.

    Args:
        level: The verbosity level active inside the context.
    """
    old_level = gym.logger.MIN_LEVEL
    gym.logger.set_level(level=level)

    try:
        yield None
    finally:
        # Restore the previous level even when the body raises, otherwise the
        # temporary verbosity would leak outside the context
        gym.logger.set_level(old_level)
================================================
FILE: python/gym_ignition/utils/math.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from numbers import Number
from typing import List, Union
import numpy as np
from scenario import gazebo as scenario
def normalize(
    input: Union[Number, List[Number], np.ndarray],
    low: Union[Number, List[Number], np.ndarray],
    high: Union[Number, List[Number], np.ndarray],
) -> Union[Number, np.ndarray]:
    """
    Normalize the input given its bounds, delegating to ``scenario.normalize``.

    Scalars are returned as scalars; sequences are returned as numpy arrays.
    If either bound is ``None`` the input is returned unchanged.
    """
    # Without both bounds there is nothing to do
    if low is None or high is None:
        return input

    # Promote scalars to single-element lists
    values = [input] if isinstance(input, Number) else input
    lower = [low] if isinstance(low, Number) else low
    upper = [high] if isinstance(high, Number) else high

    output = scenario.normalize(list(values), list(lower), list(upper))

    # Return a scalar when the output contains a single value
    return output[0] if len(output) == 1 else np.array(output)
def denormalize(
    input: Union[Number, List[Number], np.ndarray],
    low: Union[Number, List[Number], np.ndarray],
    high: Union[Number, List[Number], np.ndarray],
) -> Union[Number, np.ndarray]:
    """
    Denormalize the input given its bounds, delegating to
    ``scenario.denormalize``.

    Scalars are returned as scalars; sequences are returned as numpy arrays.
    If either bound is ``None`` the input is returned unchanged.
    """
    # Without both bounds there is nothing to do
    if low is None or high is None:
        return input

    # Promote scalars to single-element lists
    values = [input] if isinstance(input, Number) else input
    lower = [low] if isinstance(low, Number) else low
    upper = [high] if isinstance(high, Number) else high

    output = scenario.denormalize(list(values), list(lower), list(upper))

    # Return a scalar when the output contains a single value
    return output[0] if len(output) == 1 else np.array(output)
================================================
FILE: python/gym_ignition/utils/misc.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import tempfile
def string_to_file(string: str) -> str:
    """
    Dump a string into a newly-created temporary file.

    Args:
        string: The content to write.

    Returns:
        The path of the created temporary file. The caller is responsible for
        removing the file when it is no longer needed.
    """
    fd, path = tempfile.mkstemp()

    # Opening the raw descriptor lets the context manager close it for us
    with open(fd, "w") as stream:
        stream.write(string)

    return path
================================================
FILE: python/gym_ignition/utils/resource_finder.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import os
from os.path import exists, isfile
from typing import List
from gym_ignition.utils import logger
# Global list of directories where resources are searched for
GYM_IGNITION_DATA_PATH = []


def get_search_paths() -> List[str]:
    """
    Return the list of directories currently used to look up resources.

    Returns:
        The list of search paths. The actual module-level list (not a copy)
        is returned, so later registrations are reflected in it.
    """
    return GYM_IGNITION_DATA_PATH
def add_path(data_path: str) -> None:
    """
    Register a directory in the resource search path.

    Directories that do not exist or are already registered are skipped
    (with a warning and a debug message, respectively).

    Args:
        data_path: The directory to register.
    """
    if not exists(data_path):
        logger.warn(
            f"The path '{data_path}' does not exist. Not added to the data path."
        )
        return

    global GYM_IGNITION_DATA_PATH

    # Membership test replaces the original manual equality loop
    if data_path in GYM_IGNITION_DATA_PATH:
        logger.debug(f"The path '{data_path}' is already present in the data path")
        return

    logger.debug(f"Adding new search path: '{data_path}'")
    GYM_IGNITION_DATA_PATH.append(data_path)
def add_path_from_env_var(env_variable: str) -> None:
    """
    Register in the resource search path all directories listed in a
    PATH-like (colon-separated) environment variable.

    Args:
        env_variable: The name of the environment variable to read.
    """
    if env_variable not in os.environ:
        logger.warn(f"Failed to find '{env_variable}' environment variable")
        return

    # Get the environment variable
    env_var_content = os.environ[env_variable]

    # Remove leading ':' characters.
    # lstrip handles any number of leading separators and, unlike indexing the
    # first character, does not raise IndexError when the variable is empty.
    env_var_content = env_var_content.lstrip(":")

    # Split multiple values, skipping empty entries produced e.g. by '::'
    for path in env_var_content.split(":"):
        if path:
            add_path(path)
def find_resource(file_name: str) -> str:
    """
    Find a resource, either from its absolute path or by exploring the
    registered search paths in order.

    Args:
        file_name: Absolute or relative path of the resource to find.

    Raises:
        FileNotFoundError: If the resource could not be located.

    Returns:
        The absolute path of the found resource.
    """
    global GYM_IGNITION_DATA_PATH

    logger.debug(f"Looking for file '{file_name}'")

    # Handle if the path is absolute
    if os.path.isabs(file_name):
        if isfile(file_name):
            logger.debug(f" Found resource: '{file_name}'")
            return file_name
        else:
            raise FileNotFoundError(f"Failed to find resource '{file_name}'")

    # Handle if the path is relative: explore the search paths in order
    for path in GYM_IGNITION_DATA_PATH:
        logger.debug(f" Exploring folder '{path}'")

        # os.path.join is portable, unlike manual '/' concatenation
        candidate_abs_path = os.path.join(path, file_name)

        if isfile(candidate_abs_path):
            logger.debug(f" Found resource: '{candidate_abs_path}'")
            return candidate_abs_path

    raise FileNotFoundError(f"Failed to find resource '{file_name}'")
================================================
FILE: python/gym_ignition/utils/scenario.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import List, Tuple, Union
import gym.spaces
import gym_ignition_models
import numpy as np
from scenario import core
from scenario import gazebo as scenario
def get_unique_model_name(world: scenario.World, model_name: str) -> str:
    """
    Get a unique model name given a world configuration.

    This function finds the first available model name starting from the
    argument and appending an integer postfix until the resulting name is
    unique in the world.

    Tentatives example: `cartpole`, `cartpole1`, `cartpole2`, ...

    Args:
        world: An initialized world.
        model_name: The first model name attempt.

    Raises:
        ValueError: If the world is not valid.

    Returns:
        The unique model name calculated from the original name.
    """
    if world.id() == 0:
        raise ValueError("The world is not valid")

    existing_names = world.model_names()

    # Try the plain name first, then name1, name2, ...
    candidate = model_name
    suffix = 0

    while candidate in existing_names:
        suffix += 1
        candidate = f"{model_name}{suffix}"

    return candidate
def init_gazebo_sim(
    step_size: float = 0.001, real_time_factor: float = 1.0, steps_per_run: int = 1
) -> Tuple[scenario.GazeboSimulator, Union[scenario.World, core.World]]:
    """
    Initialize a Gazebo simulation with an empty world and default physics.

    Args:
        step_size: Gazebo step size.
        real_time_factor: The desired real time factor of the simulation.
        steps_per_run: How many steps should be executed at each Gazebo run.

    Raises:
        RuntimeError: If the initialization of either the simulator or the world fails.

    Returns:
        * **gazebo** -- The Gazebo simulator.
        * **world** -- The default world.
    """
    # Create and initialize the simulator
    gazebo = scenario.GazeboSimulator(step_size, real_time_factor, steps_per_run)

    if not gazebo.initialize():
        raise RuntimeError("Failed to initialize Gazebo")

    # Get the default world
    world = gazebo.get_world()

    # Insert the ground plane
    if not world.insert_model(gym_ignition_models.get_model_file("ground_plane")):
        raise RuntimeError("Failed to insert the ground plane")

    # Select the DART physics engine
    if not world.set_physics_engine(scenario.PhysicsEngine_dart):
        raise RuntimeError("Failed to insert the physics plugin")

    return gazebo, world
def get_joint_positions_space(
    model: scenario.Model, considered_joints: List[str] = None
) -> gym.spaces.Box:
    """
    Build a Box space from the joint position limits.

    Args:
        model: The model from which generating the joint space.
        considered_joints: List of considered joints. It is helpful to restrict the set
            of joints and to enforce a custom joint serialization.

    Returns:
        A box space created from the model's joint position limits.
    """
    # Default to all the model's joints
    joint_names = (
        model.joint_names() if considered_joints is None else considered_joints
    )

    # Get the joint limits
    limits = model.joint_limits(joint_names)

    # Build the space from the lower / upper position bounds
    return gym.spaces.Box(low=np.array(limits.min), high=np.array(limits.max))
================================================
FILE: python/gym_ignition/utils/typing.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import Dict, List, NewType, Tuple, Union
import gym.spaces
import numpy as np
# Episode-termination flag of the state tuple
Done = NewType("Done", bool)
# Free-form auxiliary diagnostics dictionary
Info = NewType("Info", Dict)
# Scalar reward value
Reward = NewType("Reward", float)
# Environment observation as a numpy array
Observation = NewType("Observation", np.ndarray)
# Agent action, either a numpy array or a numpy scalar
Action = NewType("Action", Union[np.ndarray, np.number])
# List of integer RNG seeds
SeedList = NewType("SeedList", List[int])
# The (observation, reward, done, info) tuple returned by step()
State = NewType("State", Tuple[Observation, Reward, Done, Info])
# gym space describing valid actions
ActionSpace = NewType("ActionSpace", gym.spaces.Space)
# gym space describing valid observations
ObservationSpace = NewType("ObservationSpace", gym.spaces.Space)
================================================
FILE: python/gym_ignition_environments/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import numpy
from gym.envs.registration import register
from . import models, randomizers, tasks
max_float = float(numpy.finfo(numpy.float32).max)


def _register_gazebo_env(env_id: str, task_cls: type) -> None:
    # All the environments below share the same runtime and rate configuration
    register(
        id=env_id,
        entry_point="gym_ignition.runtimes.gazebo_runtime:GazeboRuntime",
        max_episode_steps=5000,
        kwargs={
            "task_cls": task_cls,
            "agent_rate": 1000,
            "physics_rate": 1000,
            "real_time_factor": max_float,
        },
    )


_register_gazebo_env(
    "Pendulum-Gazebo-v0",
    tasks.pendulum_swingup.PendulumSwingUp,
)
_register_gazebo_env(
    "CartPoleDiscreteBalancing-Gazebo-v0",
    tasks.cartpole_discrete_balancing.CartPoleDiscreteBalancing,
)
_register_gazebo_env(
    "CartPoleContinuousBalancing-Gazebo-v0",
    tasks.cartpole_continuous_balancing.CartPoleContinuousBalancing,
)
_register_gazebo_env(
    "CartPoleContinuousSwingup-Gazebo-v0",
    tasks.cartpole_continuous_swingup.CartPoleContinuousSwingup,
)
================================================
FILE: python/gym_ignition_environments/models/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import cartpole, icub, panda, pendulum
================================================
FILE: python/gym_ignition_environments/models/cartpole.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import List
from gym_ignition.scenario import model_with_file, model_wrapper
from gym_ignition.utils.scenario import get_unique_model_name
from scenario import core as scenario
class CartPole(model_wrapper.ModelWrapper, model_with_file.ModelWithFile):
    """
    Cartpole model inserted into a Gazebo world.

    Args:
        world: The world where the model is inserted.
        position: The initial base position.
        orientation: The initial base orientation (quaternion).
        model_file: Optional model description overriding the default one.

    Raises:
        RuntimeError: If the model could not be inserted in the world.
    """

    def __init__(
        self,
        world: scenario.World,
        position: List[float] = (0.0, 0.0, 0.0),
        orientation: List[float] = (1.0, 0, 0, 0),
        model_file: str = None,
    ):
        # Compute a name that does not clash with existing models
        model_name = get_unique_model_name(world, "cartpole")

        # Build the initial pose
        initial_pose = scenario.Pose(position, orientation)

        # Fall back to the bundled description when none was passed
        if model_file is None:
            model_file = CartPole.get_model_file()

        # Insert the model in the world
        if not world.to_gazebo().insert_model(model_file, initial_pose, model_name):
            raise RuntimeError("Failed to insert model")

        # Retrieve the inserted model and initialize the wrapper
        super().__init__(model=world.get_model(model_name))

    @classmethod
    def get_model_file(cls) -> str:
        import gym_ignition_models

        return gym_ignition_models.get_model_file("cartpole")
================================================
FILE: python/gym_ignition_environments/models/icub.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import List
from gym_ignition import scenario
from gym_ignition.utils.scenario import get_unique_model_name
from scenario import core as scenario_core
class ICubGazeboABC(
    scenario.model_wrapper.ModelWrapper, scenario.model_with_file.ModelWithFile, abc.ABC
):
    """
    Abstract base class of the iCub humanoid Gazebo models.

    Concrete subclasses select the model description through
    :py:meth:`get_model_file`. The constructor inserts the model in the world
    and resets the joints to :py:attr:`initial_positions`.
    """

    # Model constants: degrees of freedom, number of links, number of joints
    DOFS = 32
    NUM_LINKS = 39
    NUM_JOINTS = 32

    # Joint configuration applied right after insertion.
    # NOTE(review): values are presumably radians -- confirm against the model.
    initial_positions = {
        # Left leg
        "l_knee": -1.05,
        "l_ankle_pitch": -0.57,
        "l_ankle_roll": -0.024,
        "l_hip_pitch": 0.48,
        "l_hip_roll": 0.023,
        "l_hip_yaw": -0.005,
        # Left arm
        "l_elbow": 0.54,
        "l_wrist_pitch": 0.0,
        "l_wrist_prosup": 0.0,
        "l_wrist_yaw": 0.0,
        "l_shoulder_pitch": -0.159,
        "l_shoulder_roll": 0.435,
        "l_shoulder_yaw": 0.183,
        # Head
        "neck_pitch": 0.0,
        "neck_roll": 0.0,
        "neck_yaw": 0.0,
        # Right leg
        "r_knee": -1.05,
        "r_ankle_pitch": -0.57,
        "r_ankle_roll": -0.024,
        "r_hip_pitch": 0.48,
        "r_hip_roll": 0.023,
        "r_hip_yaw": -0.005,
        # Right arm
        "r_elbow": 0.54,
        "r_wrist_pitch": 0.0,
        "r_wrist_prosup": 0.0,
        "r_wrist_yaw": 0.0,
        "r_shoulder_pitch": -0.159,
        "r_shoulder_roll": 0.435,
        "r_shoulder_yaw": 0.183,
        # Torso
        "torso_pitch": 0.1,
        "torso_roll": 0.0,
        "torso_yaw": 0.0,
    }

    def __init__(
        self,
        world: scenario_core.World,
        position: List[float],
        orientation: List[float],
        model_file: str = None,
    ):
        # Get a unique model name
        model_name = get_unique_model_name(world, "icub")

        # Initial pose
        initial_pose = scenario_core.Pose(position, orientation)

        # Get the model file (URDF or SDF) and allow to override it
        if model_file is None:
            model_file = self.get_model_file()

        # Insert the model
        ok_model = world.to_gazebo().insert_model(model_file, initial_pose, model_name)

        if not ok_model:
            raise RuntimeError("Failed to insert model")

        # Get the model
        model = world.get_model(model_name)

        # Initialize base class
        super().__init__(model=model)

        # Store the initial positions.
        # The dict is serialized into parallel value / name lists.
        q0 = list(self.initial_positions.values())
        joint_names = list(self.initial_positions.keys())

        # Sanity check: the configuration must cover all degrees of freedom
        assert self.dofs() == len(q0) == len(joint_names)

        ok_q0 = self.to_gazebo().reset_joint_positions(q0, joint_names)
        assert ok_q0, "Failed to set initial position"
class ICubGazebo(ICubGazeboABC):
    """iCub humanoid model backed by the ``iCubGazeboV2_5`` description."""

    def __init__(
        self,
        world: scenario_core.World,
        position: List[float] = (0.0, 0.0, 0.572),
        orientation: List[float] = (0, 0, 0, 1.0),
        model_file: str = None,
    ):
        # All the insertion logic lives in the ABC
        ICubGazeboABC.__init__(
            self,
            world=world,
            position=position,
            orientation=orientation,
            model_file=model_file,
        )

    @classmethod
    def get_model_file(cls) -> str:
        import gym_ignition_models

        return gym_ignition_models.get_model_file("iCubGazeboV2_5")
class ICubGazeboSimpleCollisions(ICubGazeboABC):
    """iCub humanoid model backed by the ``iCubGazeboSimpleCollisionsV2_5`` description."""

    def __init__(
        self,
        world: scenario_core.World,
        position: List[float] = (0.0, 0.0, 0.572),
        orientation: List[float] = (0, 0, 0, 1.0),
        model_file: str = None,
    ):
        # All the insertion logic lives in the ABC
        ICubGazeboABC.__init__(
            self,
            world=world,
            position=position,
            orientation=orientation,
            model_file=model_file,
        )

    @classmethod
    def get_model_file(cls) -> str:
        import gym_ignition_models

        return gym_ignition_models.get_model_file("iCubGazeboSimpleCollisionsV2_5")
================================================
FILE: python/gym_ignition_environments/models/panda.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import List
from gym_ignition.scenario import model_with_file, model_wrapper
from gym_ignition.utils.scenario import get_unique_model_name
from scenario import core as scenario
class Panda(model_wrapper.ModelWrapper, model_with_file.ModelWithFile):
    """
    Franka Emika Panda model inserted into a Gazebo world.

    The constructor inserts the model, resets the arm joints to a default
    configuration, and configures joint PID controllers.

    Args:
        world: The world where the model is inserted.
        position: The initial base position.
        orientation: The initial base orientation (quaternion).
        model_file: Optional model description overriding the default one.

    Raises:
        RuntimeError: If inserting the model or configuring it fails.
        ValueError: If the model joints do not match the stored PID gains.
    """

    def __init__(
        self,
        world: scenario.World,
        position: List[float] = (0.0, 0.0, 0.0),
        orientation: List[float] = (1.0, 0, 0, 0),
        model_file: str = None,
    ):
        # Get a unique model name
        model_name = get_unique_model_name(world, "panda")

        # Initial pose
        initial_pose = scenario.Pose(position, orientation)

        # Get the default model description (URDF or SDF) allowing to pass a custom model
        if model_file is None:
            model_file = Panda.get_model_file()

        # Insert the model
        ok_model = world.to_gazebo().insert_model(model_file, initial_pose, model_name)

        if not ok_model:
            raise RuntimeError("Failed to insert model")

        # Get the model
        model = world.get_model(model_name)

        # Initial joint configuration of the arm joints
        model.to_gazebo().reset_joint_positions(
            [0, -0.785, 0, -2.356, 0, 1.571, 0.785],
            [name for name in model.joint_names() if "panda_joint" in name],
        )

        # From:
        # https://github.com/mkrizmancic/franka_gazebo/blob/master/config/default.yaml
        pid_gains_1000hz = {
            "panda_joint1": scenario.PID(50, 0, 20),
            "panda_joint2": scenario.PID(10000, 0, 500),
            "panda_joint3": scenario.PID(100, 0, 10),
            "panda_joint4": scenario.PID(1000, 0, 50),
            "panda_joint5": scenario.PID(100, 0, 10),
            "panda_joint6": scenario.PID(100, 0, 10),
            "panda_joint7": scenario.PID(10, 0.5, 0.1),
            "panda_finger_joint1": scenario.PID(100, 0, 50),
            "panda_finger_joint2": scenario.PID(100, 0, 50),
        }

        # Check that all joints have gains
        if not set(model.joint_names()) == set(pid_gains_1000hz.keys()):
            raise ValueError("The number of PIDs does not match the number of joints")

        # Set the PID gains
        for joint_name, pid in pid_gains_1000hz.items():
            if not model.get_joint(joint_name).set_pid(pid=pid):
                raise RuntimeError(f"Failed to set the PID of joint '{joint_name}'")

        # Set the default PID update period.
        # Use an explicit check instead of assert: assertions are stripped when
        # Python runs with -O, and every other failure above raises explicitly.
        if not model.set_controller_period(1000.0):
            raise RuntimeError("Failed to set the controller period")

        # Initialize base class
        super().__init__(model=model)

    @classmethod
    def get_model_file(cls) -> str:
        import gym_ignition_models

        return gym_ignition_models.get_model_file("panda")
================================================
FILE: python/gym_ignition_environments/models/pendulum.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import List
from gym_ignition.scenario import model_with_file, model_wrapper
from gym_ignition.utils.scenario import get_unique_model_name
from scenario import core as scenario
class Pendulum(model_wrapper.ModelWrapper, model_with_file.ModelWithFile):
    """
    Pendulum model inserted into a Gazebo world.

    Args:
        world: The world where the model is inserted.
        position: The initial base position.
        orientation: The initial base orientation (quaternion).
        model_file: Optional model description overriding the default one.

    Raises:
        RuntimeError: If the model could not be inserted in the world.
    """

    def __init__(
        self,
        world: scenario.World,
        position: List[float] = (0.0, 0.0, 0.0),
        orientation: List[float] = (1.0, 0, 0, 0),
        model_file: str = None,
    ):
        # Compute a name that does not clash with existing models
        model_name = get_unique_model_name(world, "pendulum")

        # Build the initial pose
        initial_pose = scenario.Pose(position, orientation)

        # Fall back to the bundled description when none was passed
        if model_file is None:
            model_file = Pendulum.get_model_file()

        # Insert the model in the world
        if not world.to_gazebo().insert_model(model_file, initial_pose, model_name):
            raise RuntimeError("Failed to insert model")

        # Retrieve the inserted model and initialize the wrapper
        super().__init__(model=world.get_model(model_name))

    @classmethod
    def get_model_file(cls) -> str:
        import gym_ignition_models

        return gym_ignition_models.get_model_file("pendulum")
================================================
FILE: python/gym_ignition_environments/randomizers/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import cartpole, cartpole_no_rand
================================================
FILE: python/gym_ignition_environments/randomizers/cartpole.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import Union
from gym_ignition import randomizers, utils
from gym_ignition.randomizers import gazebo_env_randomizer
from gym_ignition.randomizers.gazebo_env_randomizer import MakeEnvCallable
from gym_ignition.randomizers.model.sdf import Distribution, Method, UniformParams
from gym_ignition.utils import misc
from gym_ignition_environments import tasks
from gym_ignition_environments.models import cartpole
from scenario import gazebo as scenario
# Union of the cartpole task classes supported by this randomizer.
# Used only for type hinting.
SupportedTasks = Union[
    tasks.cartpole_discrete_balancing.CartPoleDiscreteBalancing,
    tasks.cartpole_continuous_swingup.CartPoleContinuousSwingup,
    tasks.cartpole_continuous_balancing.CartPoleContinuousBalancing,
]
class CartpoleRandomizersMixin(
    randomizers.abc.TaskRandomizer,
    randomizers.abc.PhysicsRandomizer,
    randomizers.abc.ModelDescriptionRandomizer,
    abc.ABC,
):
    """
    Mixin that collects the implementation of task, model and physics randomizations for
    cartpole environments.
    """

    def __init__(self, randomize_physics_after_rollouts: int = 0):

        # Initialize base classes
        randomizers.abc.TaskRandomizer.__init__(self)
        randomizers.abc.PhysicsRandomizer.__init__(
            self, randomize_after_rollouts_num=randomize_physics_after_rollouts
        )
        randomizers.abc.ModelDescriptionRandomizer.__init__(self)

        # SDF randomizer, created lazily by _get_sdf_randomizer
        self._sdf_randomizer = None

    # ===========================
    # PhysicsRandomizer interface
    # ===========================

    def get_engine(self):
        return scenario.PhysicsEngine_dart

    def randomize_physics(self, task: SupportedTasks, **kwargs) -> None:
        """Randomize the gravity around its nominal value using the task RNG."""
        gravity_z = task.np_random.normal(loc=-9.8, scale=0.2)

        if not task.world.to_gazebo().set_gravity((0, 0, gravity_z)):
            raise RuntimeError("Failed to set the gravity")

    # ========================
    # TaskRandomizer interface
    # ========================

    def randomize_task(self, task: SupportedTasks, **kwargs) -> None:
        """
        Replace the cartpole with a freshly randomized model.

        Raises:
            ValueError: If the ``gazebo`` kwarg is missing.
            RuntimeError: If any Gazebo operation fails.
        """
        # Validate the arguments before touching the world, so that a wrong
        # call does not leave the simulation without its model
        if "gazebo" not in kwargs:
            raise ValueError("gazebo kwarg not passed to the task randomizer")

        gazebo = kwargs["gazebo"]

        # Remove the model from the world
        self._clean_world(task=task)

        # Execute a paused run to process model removal
        if not gazebo.run(paused=True):
            raise RuntimeError("Failed to execute a paused Gazebo run")

        # Generate a random model description
        random_model = self.randomize_model_description(task=task)

        # Insert a new model in the world
        self._populate_world(task=task, cartpole_model=random_model)

        # Execute a paused run to process model insertion
        if not gazebo.run(paused=True):
            raise RuntimeError("Failed to execute a paused Gazebo run")

    # ====================================
    # ModelDescriptionRandomizer interface
    # ====================================

    def randomize_model_description(self, task: SupportedTasks, **kwargs) -> str:
        """Sample a randomized SDF model and return the path of its temp file."""
        randomizer = self._get_sdf_randomizer(task=task)
        sdf = misc.string_to_file(randomizer.sample())
        return sdf

    # ===============
    # Private Methods
    # ===============

    def _get_sdf_randomizer(
        self, task: SupportedTasks
    ) -> randomizers.model.sdf.SDFRandomizer:
        """Lazily build (and cache) the SDF randomizer of the cartpole model."""
        if self._sdf_randomizer is not None:
            return self._sdf_randomizer

        # Get the model file
        urdf_model_file = cartpole.CartPole.get_model_file()

        # Convert the URDF to SDF
        sdf_model_string = scenario.urdffile_to_sdfstring(urdf_model_file)

        # Write the SDF string to a temp file
        sdf_model = utils.misc.string_to_file(sdf_model_string)

        # Create and initialize the randomizer
        sdf_randomizer = randomizers.model.sdf.SDFRandomizer(sdf_model=sdf_model)

        # Use the RNG of the task
        sdf_randomizer.rng = task.np_random

        # Randomize the mass of all links with an additive uniform perturbation
        sdf_randomizer.new_randomization().at_xpath("*/link/inertial/mass").method(
            Method.Additive
        ).sampled_from(
            Distribution.Uniform, UniformParams(low=-0.2, high=0.2)
        ).force_positive().add()

        # Process the randomization
        sdf_randomizer.process_data()
        assert len(sdf_randomizer.get_active_randomizations()) > 0

        # Store and return the randomizer
        self._sdf_randomizer = sdf_randomizer
        return self._sdf_randomizer

    @staticmethod
    def _clean_world(task: SupportedTasks) -> None:
        """Remove the task's cartpole from the simulation, if present."""
        if task.model_name is not None and task.model_name in task.world.model_names():

            if not task.world.to_gazebo().remove_model(task.model_name):
                raise RuntimeError("Failed to remove the cartpole from the world")

    @staticmethod
    def _populate_world(task: SupportedTasks, cartpole_model: str = None) -> None:
        """Insert a new cartpole and store its name in the task."""
        # Insert a new cartpole.
        # It will create a unique name if there are clashing.
        model = cartpole.CartPole(world=task.world, model_file=cartpole_model)

        # Store the model name in the task
        task.model_name = model.name()
class CartpoleEnvRandomizer(
    gazebo_env_randomizer.GazeboEnvRandomizer, CartpoleRandomizersMixin
):
    """
    Concrete implementation of cartpole environments randomization.
    """

    def __init__(self, env: MakeEnvCallable, num_physics_rollouts: int = 0):
        # Initialize the mixin first, so the randomization machinery is ready
        # before the environment randomizer receives self below
        CartpoleRandomizersMixin.__init__(
            self, randomize_physics_after_rollouts=num_physics_rollouts
        )

        # Initialize the environment randomizer, passing self as the object
        # implementing the physics randomization
        gazebo_env_randomizer.GazeboEnvRandomizer.__init__(
            self, env=env, physics_randomizer=self
        )
================================================
FILE: python/gym_ignition_environments/randomizers/cartpole_no_rand.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from typing import Union
from gym_ignition.randomizers import gazebo_env_randomizer
from gym_ignition.randomizers.gazebo_env_randomizer import MakeEnvCallable
from gym_ignition_environments import tasks
from gym_ignition_environments.models import cartpole
# Union of the cartpole task classes supported by this randomizer.
# Used only for type hinting.
SupportedTasks = Union[
    tasks.cartpole_discrete_balancing.CartPoleDiscreteBalancing,
    tasks.cartpole_continuous_swingup.CartPoleContinuousSwingup,
    tasks.cartpole_continuous_balancing.CartPoleContinuousBalancing,
]
class CartpoleEnvNoRandomizations(gazebo_env_randomizer.GazeboEnvRandomizer):
    """
    Dummy environment randomizer for cartpole tasks.

    Check :py:class:`~gym_ignition_environments.randomizers.cartpole.CartpoleRandomizersMixin`
    for an example that randomizes the task, the physics, and the model.
    """

    def __init__(self, env: MakeEnvCallable):
        super().__init__(env=env)

    def randomize_task(self, task: SupportedTasks, **kwargs) -> None:
        """
        Prepare the scene for cartpole tasks. It simply removes the cartpole of the
        previous rollout and inserts a new one in the default state. Then, the active
        Task will reset the state of the cartpole depending on the implemented
        decision-making logic.
        """
        if "gazebo" not in kwargs:
            raise ValueError("gazebo kwarg not passed to the task randomizer")

        gazebo = kwargs["gazebo"]

        # Remove the cartpole of the previous rollout, if still present
        old_name = task.model_name

        if old_name is not None and old_name in task.world.model_names():
            if not task.world.to_gazebo().remove_model(old_name):
                raise RuntimeError("Failed to remove the cartpole from the world")

        # Execute a paused run to process model removal
        if not gazebo.run(paused=True):
            raise RuntimeError("Failed to execute a paused Gazebo run")

        # Insert a fresh cartpole and remember its name in the task
        task.model_name = cartpole.CartPole(world=task.world).name()

        # Execute a paused run to process model insertion
        if not gazebo.run(paused=True):
            raise RuntimeError("Failed to execute a paused Gazebo run")
================================================
FILE: python/gym_ignition_environments/tasks/__init__.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
from . import (
cartpole_continuous_balancing,
cartpole_continuous_swingup,
cartpole_discrete_balancing,
pendulum_swingup,
)
================================================
FILE: python/gym_ignition_environments/tasks/cartpole_continuous_balancing.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import Tuple
import gym
import numpy as np
from gym_ignition.base import task
from gym_ignition.utils.typing import (
Action,
ActionSpace,
Observation,
ObservationSpace,
Reward,
)
from scenario import core as scenario
class CartPoleContinuousBalancing(task.Task, abc.ABC):
    """
    Balancing task for a cartpole controlled with a continuous force.

    The agent applies a continuous force to the cart (the "linear" joint) and
    has to keep the pole angle (the "pivot" joint) within its threshold.
    The observation is [x, dx, q, dq]: cart position and velocity, pole angle
    and angular velocity. The episode terminates as soon as the observation
    exits the reset space.
    """

    def __init__(self, agent_rate: float, reward_cart_at_center: bool = True, **kwargs):
        # Initialize the Task base class
        task.Task.__init__(self, agent_rate=agent_rate)

        # Name of the cartpole model (set by the environment randomizer)
        self.model_name = None

        # Space used both to check termination and to bound the reset state
        self.reset_space = None

        # Private attributes
        self._reward_cart_at_center = reward_cart_at_center

        # Variables limits
        self._x_threshold = 2.4  # m
        self._dx_threshold = 20.0  # m /s
        self._q_threshold = np.deg2rad(12)  # rad
        self._dq_threshold = np.deg2rad(3 * 360)  # rad / s

    def create_spaces(self) -> Tuple[ActionSpace, ObservationSpace]:
        """Create the action and observation spaces, and store the reset space."""

        # Create the action space: 1D continuous force applied to the cart
        max_force = 50.0  # N
        action_space = gym.spaces.Box(
            low=np.array([-max_force]), high=np.array([max_force]), dtype=np.float32
        )

        # Configure reset limits
        high = np.array(
            [
                self._x_threshold,  # x
                self._dx_threshold,  # dx
                self._q_threshold,  # q
                self._dq_threshold,  # dq
            ]
        )

        # Configure the reset space
        self.reset_space = gym.spaces.Box(low=-high, high=high, dtype=np.float32)

        # Configure the observation space, 20% wider than the reset space so
        # that terminal observations are still contained in it
        # (note: '*' already returns a new array, no copy() needed)
        obs_high = high * 1.2
        observation_space = gym.spaces.Box(
            low=-obs_high, high=obs_high, dtype=np.float32
        )

        return action_space, observation_space

    def set_action(self, action: Action) -> None:
        """Apply the force contained in the action to the cart."""

        # Get the force value
        force = action.tolist()[0]

        # Set the force value
        model = self.world.get_model(self.model_name)
        ok_force = model.get_joint("linear").set_generalized_force_target(force)

        if not ok_force:
            raise RuntimeError("Failed to set the force to the cart")

    def get_observation(self) -> Observation:
        """Return the observation [x, dx, q, dq]."""

        # Get the model
        model = self.world.get_model(self.model_name)

        # Get the new joint positions and velocities
        q, x = model.joint_positions(["pivot", "linear"])
        dq, dx = model.joint_velocities(["pivot", "linear"])

        # Create the observation
        observation = Observation(np.array([x, dx, q, dq]))

        return observation

    def get_reward(self) -> Reward:
        """
        Return a unitary reward while the observation is inside the reset
        space, optionally shaped to keep the cart close to the center.
        """

        # Query the observation only once and reuse it: the previous
        # implementation called is_done() and then get_observation() again,
        # querying the simulated model twice per step
        observation = self.get_observation()

        # Unitary reward while alive (equivalent to `not self.is_done()`)
        reward = 1.0 if self.reset_space.contains(observation) else 0.0

        if self._reward_cart_at_center:
            # Shape the reward with the cart position and velocity
            x, dx, _, _ = observation

            reward = (
                reward
                - 0.10 * np.abs(x)
                - 0.10 * np.abs(dx)
                - 10.0 * (x >= self._x_threshold)
            )

        return reward

    def is_done(self) -> bool:
        """Return True when the observation exits the reset space."""

        # Get the observation
        observation = self.get_observation()

        # The environment is done if the observation is outside its space
        done = not self.reset_space.contains(observation)

        return done

    def reset_task(self) -> None:
        """
        Reset the cartpole to a small random state around the origin.

        Raises:
            RuntimeError: if the model is not part of the world or any
                low-level call to the simulator fails.
        """

        if self.model_name not in self.world.model_names():
            raise RuntimeError("Cartpole model not found in the world")

        # Get the model
        model = self.world.get_model(self.model_name)

        # Control the cart in force mode
        linear = model.get_joint("linear")
        ok_control_mode = linear.set_control_mode(scenario.JointControlMode_force)

        if not ok_control_mode:
            raise RuntimeError("Failed to change the control mode of the cartpole")

        # Create a new cartpole state
        x, dx, q, dq = self.np_random.uniform(low=-0.05, high=0.05, size=(4,))

        # Reset the cartpole state
        ok_reset_pos = model.to_gazebo().reset_joint_positions(
            [x, q], ["linear", "pivot"]
        )
        ok_reset_vel = model.to_gazebo().reset_joint_velocities(
            [dx, dq], ["linear", "pivot"]
        )

        if not (ok_reset_pos and ok_reset_vel):
            raise RuntimeError("Failed to reset the cartpole state")
================================================
FILE: python/gym_ignition_environments/tasks/cartpole_continuous_swingup.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import Tuple
import gym
import numpy as np
from gym_ignition.base import task
from gym_ignition.utils.typing import (
Action,
ActionSpace,
Observation,
ObservationSpace,
Reward,
)
from scenario import core as scenario
class CartPoleContinuousSwingup(task.Task, abc.ABC):
    """
    Swing-up task for a cartpole controlled with a continuous force.

    The pole starts near the downward position and the agent, applying a
    continuous force to the cart (the "linear" joint), has to swing it up.
    The observation is [x, dx, q, dq]: cart position and velocity, pole angle
    and angular velocity.
    """

    def __init__(self, agent_rate: float, reward_cart_at_center: bool = True, **kwargs):
        # Initialize the Task base class
        task.Task.__init__(self, agent_rate=agent_rate)

        # Name of the cartpole model (set by the environment randomizer)
        self.model_name = None

        # Space used both to check termination and to bound the reset state
        self.reset_space = None

        # Private attributes
        self._reward_cart_at_center = reward_cart_at_center

        # Variables limits
        self._x_threshold = 2.4  # m
        self._dx_threshold = 20.0  # m /s
        self._q_threshold = np.deg2rad(5 * 360)  # rad
        self._dq_threshold = np.deg2rad(3 * 360)  # rad / s

    def create_spaces(self) -> Tuple[ActionSpace, ObservationSpace]:
        """Create the action and observation spaces, and store the reset space."""

        # Create the action space: 1D continuous force applied to the cart
        max_force = 200.0  # N
        action_space = gym.spaces.Box(
            low=np.array([-max_force]), high=np.array([max_force]), dtype=np.float32
        )

        # Configure reset limits
        high = np.array(
            [
                self._x_threshold,  # x
                self._dx_threshold,  # dx
                self._q_threshold,  # q
                self._dq_threshold,  # dq
            ]
        )

        # Configure the reset space
        self.reset_space = gym.spaces.Box(low=-high, high=high, dtype=np.float32)

        # Configure the observation space, 20% wider than the reset space so
        # that terminal observations are still contained in it
        # (note: '*' already returns a new array, no copy() needed)
        obs_high = high * 1.2
        observation_space = gym.spaces.Box(
            low=-obs_high, high=obs_high, dtype=np.float32
        )

        return action_space, observation_space

    def set_action(self, action: Action) -> None:
        """Apply the force contained in the action to the cart."""

        # Get the force value
        force = action.tolist()[0]

        # Set the force value
        model = self.world.get_model(self.model_name)
        ok_force = model.get_joint("linear").set_generalized_force_target(force)

        if not ok_force:
            raise RuntimeError("Failed to set the force to the cart")

    def get_observation(self) -> Observation:
        """Return the observation [x, dx, q, dq]."""

        # Get the model
        model = self.world.get_model(self.model_name)

        # Get the new joint positions and velocities
        q, x = model.joint_positions(["pivot", "linear"])
        dq, dx = model.joint_velocities(["pivot", "linear"])

        # Create the observation
        observation = Observation(np.array([x, dx, q, dq]))

        return observation

    def get_reward(self) -> Reward:
        """
        Return a reward in [0, 1] that is maximum when the pole is at q=0,
        penalizing cart velocity and positions close to the end of the rail.
        """

        # Get the model
        model = self.world.get_model(self.model_name)

        # Get the pendulum position
        q = model.get_joint("pivot").position()

        # Get the cart state, looking the joint object up only once
        # (the previous implementation queried get_joint("linear") twice)
        linear = model.get_joint("linear")
        x = linear.position()
        dx = linear.velocity()

        # Reward is [0, 1] for q=[0, pi]
        reward = (np.cos(q) + 1) / 2

        # Penalize cart velocities
        reward -= 0.1 * (dx ** 2)

        # Penalize positions close to the end of the rail.
        # NOTE(review): only the positive end is penalized here — confirm
        # whether np.abs(x) was intended.
        reward -= 10.0 * (x >= 0.8 * self._x_threshold)

        return reward

    def is_done(self) -> bool:
        """Return True when the observation exits the reset space."""

        # Get the observation
        observation = self.get_observation()

        # The environment is done if the observation is outside its space
        done = not self.reset_space.contains(observation)

        return done

    def reset_task(self) -> None:
        """
        Reset the cartpole with the pole close to the downward position.

        Raises:
            RuntimeError: if the model is not part of the world or any
                low-level call to the simulator fails.
        """

        if self.model_name not in self.world.model_names():
            raise RuntimeError("Cartpole model not found in the world")

        # Get the model
        model = self.world.get_model(self.model_name)

        # Control the cart in force mode
        linear = model.get_joint("linear")
        ok_control_mode = linear.set_control_mode(scenario.JointControlMode_force)

        if not ok_control_mode:
            raise RuntimeError("Failed to change the control mode of the cartpole")

        # Create a new cartpole state: pole within +-60 deg of the downward
        # position (q = pi), small random cart position and velocities
        q = np.pi - np.deg2rad(self.np_random.uniform(low=-60, high=60))
        x, dx, dq = self.np_random.uniform(low=-0.05, high=0.05, size=(3,))

        # Reset the cartpole state
        ok_reset_pos = model.to_gazebo().reset_joint_positions(
            [x, q], ["linear", "pivot"]
        )
        ok_reset_vel = model.to_gazebo().reset_joint_velocities(
            [dx, dq], ["linear", "pivot"]
        )

        if not (ok_reset_pos and ok_reset_vel):
            raise RuntimeError("Failed to reset the cartpole state")
================================================
FILE: python/gym_ignition_environments/tasks/cartpole_discrete_balancing.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import Tuple
import gym
import numpy as np
from gym_ignition.base import task
from gym_ignition.utils.typing import (
Action,
ActionSpace,
Observation,
ObservationSpace,
Reward,
)
from scenario import core as scenario
class CartPoleDiscreteBalancing(task.Task, abc.ABC):
    """
    Balancing task for a cartpole controlled with a discrete action.

    The agent selects one of two actions, pushing the cart (the "linear"
    joint) with a fixed force magnitude in either direction. The observation
    is [x, dx, q, dq]: cart position and velocity, pole angle and angular
    velocity. The episode terminates as soon as the observation exits the
    reset space.
    """

    def __init__(
        self, agent_rate: float, reward_cart_at_center: bool = True, **kwargs
    ) -> None:
        # Initialize the Task base class
        task.Task.__init__(self, agent_rate=agent_rate)

        # Name of the cartpole model (set by the environment randomizer)
        self.model_name = None

        # Space used both to check termination and to bound the reset state
        self.reset_space = None

        # Private attributes
        self._force_mag = 20.0  # N
        self._reward_cart_at_center = reward_cart_at_center

        # Variables limits
        self._x_threshold = 2.4  # m
        self._dx_threshold = 20.0  # m /s
        self._q_threshold = np.deg2rad(12)  # rad
        self._dq_threshold = np.deg2rad(3 * 360)  # rad / s

    def create_spaces(self) -> Tuple[ActionSpace, ObservationSpace]:
        """Create the action and observation spaces, and store the reset space."""

        # Configure action space: {0, 1}, mapped to {-force, +force}
        action_space = gym.spaces.Discrete(2)

        # Configure reset limits
        high = np.array(
            [
                self._x_threshold,  # x
                self._dx_threshold,  # dx
                self._q_threshold,  # q
                self._dq_threshold,  # dq
            ]
        )

        # Configure the reset space
        self.reset_space = gym.spaces.Box(low=-high, high=high, dtype=np.float32)

        # Configure the observation space, 20% wider than the reset space so
        # that terminal observations are still contained in it
        # (note: '*' already returns a new array, no copy() needed)
        obs_high = high * 1.2
        observation_space = gym.spaces.Box(
            low=-obs_high, high=obs_high, dtype=np.float32
        )

        return action_space, observation_space

    def set_action(self, action: Action) -> None:
        """Push the cart with +force_mag (action == 1) or -force_mag otherwise."""

        # Calculate the force
        force = self._force_mag if action == 1 else -self._force_mag

        # Set the force value
        model = self.world.get_model(self.model_name)
        ok_force = model.get_joint("linear").set_generalized_force_target(force)

        if not ok_force:
            raise RuntimeError("Failed to set the force to the cart")

    def get_observation(self) -> Observation:
        """Return the observation [x, dx, q, dq]."""

        # Get the model
        model = self.world.get_model(self.model_name)

        # Get the new joint positions and velocities
        q, x = model.joint_positions(["pivot", "linear"])
        dq, dx = model.joint_velocities(["pivot", "linear"])

        # Create the observation
        observation = Observation(np.array([x, dx, q, dq]))

        return observation

    def get_reward(self) -> Reward:
        """
        Return a unitary reward while the observation is inside the reset
        space, optionally shaped to keep the cart close to the center.
        """

        # Query the observation only once and reuse it: the previous
        # implementation called is_done() and then get_observation() again,
        # querying the simulated model twice per step
        observation = self.get_observation()

        # Unitary reward while alive (equivalent to `not self.is_done()`)
        reward = 1.0 if self.reset_space.contains(observation) else 0.0

        if self._reward_cart_at_center:
            # Shape the reward with the cart position and velocity
            x, dx, _, _ = observation

            reward = (
                reward
                - 0.10 * np.abs(x)
                - 0.10 * np.abs(dx)
                - 10.0 * (x >= 0.9 * self._x_threshold)
            )

        return reward

    def is_done(self) -> bool:
        """Return True when the observation exits the reset space."""

        # Get the observation
        observation = self.get_observation()

        # The environment is done if the observation is outside its space
        done = not self.reset_space.contains(observation)

        return done

    def reset_task(self) -> None:
        """
        Reset the cartpole to a small random state around the origin.

        Raises:
            RuntimeError: if the model is not part of the world or any
                low-level call to the simulator fails.
        """

        if self.model_name not in self.world.model_names():
            raise RuntimeError("Cartpole model not found in the world")

        # Get the model
        model = self.world.get_model(self.model_name)

        # Control the cart in force mode
        linear = model.get_joint("linear")
        ok_control_mode = linear.set_control_mode(scenario.JointControlMode_force)

        if not ok_control_mode:
            raise RuntimeError("Failed to change the control mode of the cartpole")

        # Create a new cartpole state
        x, dx, q, dq = self.np_random.uniform(low=-0.05, high=0.05, size=(4,))

        # Reset the cartpole state
        ok_reset_pos = model.to_gazebo().reset_joint_positions(
            [x, q], ["linear", "pivot"]
        )
        ok_reset_vel = model.to_gazebo().reset_joint_velocities(
            [dx, dq], ["linear", "pivot"]
        )

        if not (ok_reset_pos and ok_reset_vel):
            raise RuntimeError("Failed to reset the cartpole state")
================================================
FILE: python/gym_ignition_environments/tasks/pendulum_swingup.py
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import abc
from typing import Tuple
import gym
import numpy as np
from gym_ignition.base import task
from gym_ignition.utils.typing import (
Action,
ActionSpace,
Observation,
ObservationSpace,
Reward,
)
from scenario import core as scenario
class PendulumSwingUp(task.Task, abc.ABC):
    """
    Swing-up task for a torque-controlled pendulum.

    The agent applies a torque to the "pivot" joint and is rewarded with the
    negated quadratic cost of the angle, velocity, and applied torque.
    The observation is [cos(q), sin(q), dq].
    """

    def __init__(self, agent_rate: float, **kwargs):
        # Initialize the Task base class
        task.Task.__init__(self, agent_rate=agent_rate)

        # Name of the pendulum model (set by the environment randomizer)
        self.model_name = None

        # Limits
        self._max_speed = 10.0  # rad / s
        self._max_torque = 50.0  # Nm

    def create_spaces(self) -> Tuple[ActionSpace, ObservationSpace]:
        """Create the action and observation spaces."""

        # 1D continuous torque applied to the pivot
        action_space = gym.spaces.Box(
            low=-self._max_torque, high=self._max_torque, shape=(1,), dtype=np.float32
        )

        # Observation limits: [cos(q), sin(q), dq]
        high = np.array([1.0, 1.0, self._max_speed])
        observation_space = gym.spaces.Box(low=-high, high=high, dtype=np.float32)

        return action_space, observation_space

    def set_action(self, action: Action) -> None:
        """Apply the torque contained in the action to the pivot."""

        # Get the force value
        force = action.tolist()[0]

        # Set the force value
        model = self.world.get_model(self.model_name)
        ok_force = model.get_joint("pivot").set_generalized_force_target(force)

        if not ok_force:
            raise RuntimeError("Failed to set the force to the pendulum")

    def get_observation(self) -> Observation:
        """Return the observation [cos(q), sin(q), dq]."""

        # Look the pivot joint up only once
        # (the previous implementation queried get_joint("pivot") twice)
        model = self.world.get_model(self.model_name)
        pivot = model.get_joint("pivot")

        # Get the new joint position and velocity
        q = pivot.position()
        dq = pivot.velocity()

        # Create the observation
        observation = Observation(np.array([np.cos(q), np.sin(q), dq]))

        return observation

    def get_reward(self) -> Reward:
        """Return the negated quadratic cost of the current pendulum state."""

        # This environment is done only if the observation goes outside its limits.
        # Since it can happen only when velocity is too high, penalize this happening.
        cost = 100.0 if self.is_done() else 0.0

        # Look the pivot joint up only once
        # (the previous implementation queried get_joint("pivot") three times)
        model = self.world.get_model(self.model_name)
        pivot = model.get_joint("pivot")

        # Get the pendulum state
        q = pivot.position()
        dq = pivot.velocity()
        tau = pivot.generalized_force_target()

        # Calculate the cost
        cost += (q ** 2) + 0.1 * (dq ** 2) + 0.001 * (tau ** 2)

        return Reward(-cost)

    def is_done(self) -> bool:
        """Return True when the observation exits the observation space."""

        # Get the observation
        observation = self.get_observation()

        # The environment is done if the observation is outside its space
        done = not self.observation_space.contains(observation)

        return done

    def reset_task(self) -> None:
        """
        Reset the pendulum to a random state sampled from the observation space.

        Raises:
            RuntimeError: if the model is not part of the world or any
                low-level call to the simulator fails.
        """

        if self.model_name not in self.world.model_names():
            # Fixed error message: it previously mentioned a cartpole
            raise RuntimeError("The pendulum model was not inserted in the world")

        # Get the model
        model = self.world.get_model(self.model_name)

        # Control the pendulum in force mode
        pivot = model.get_joint("pivot")
        ok_control_mode = pivot.set_control_mode(scenario.JointControlMode_force)

        if not ok_control_mode:
            raise RuntimeError("Failed to change the control mode of the pendulum")

        # Sample an observation
        cos_q, sin_q, dq = self.observation_space.sample()

        # Compute the angle
        q = np.arctan2(sin_q, cos_q)

        # Convert to float
        q, dq = float(q), float(dq)

        # Reset the pendulum state
        ok_reset = pivot.to_gazebo().reset(q, dq)

        if not ok_reset:
            raise RuntimeError("Failed to reset the pendulum state")
================================================
FILE: scenario/CMakeLists.txt
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
cmake_minimum_required(VERSION 3.16)
project(scenario VERSION 1.3.1)
# Add custom functions / macros
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake)
# C++ standard
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# Include useful features
include(GNUInstallDirs)
# Build type
if(NOT CMAKE_CONFIGURATION_TYPES)
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE "Release" CACHE STRING
"Choose the type of build, recommended options are: Debug or Release" FORCE)
endif()
set(SCENARIO_BUILD_TYPES "Debug" "Release" "MinSizeRel" "RelWithDebInfo")
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS ${SCENARIO_BUILD_TYPES})
endif()
# Expose shared or static compilation
set(SCENARIO_BUILD_SHARED_LIBRARY TRUE
CACHE BOOL "Compile libraries as shared libraries")
set(BUILD_SHARED_LIBS ${SCENARIO_BUILD_SHARED_LIBRARY})
# Use -fPIC even if statically compiled
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
# Tweak linker flags in Linux
if(UNIX AND NOT APPLE)
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
get_filename_component(LINKER_BIN ${CMAKE_LINKER} NAME)
if("${LINKER_BIN}" STREQUAL "ld")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--unresolved-symbols=report-all")
endif()
endif()
endif()
# Control where binaries and libraries are placed in the build folder.
# This simplifies tests running in Windows.
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_BINDIR}")
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_LIBDIR}")
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_LIBDIR}")
# Get include-what-you-use information when compiling
option(SCENARIO_USE_IWYU "Get the output of include-what-you-use" OFF)
mark_as_advanced(SCENARIO_USE_IWYU)
if(SCENARIO_USE_IWYU)
find_program(IWYU_PATH NAMES include-what-you-use iwyu)
if(IWYU_PATH)
set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE ${IWYU_PATH})
endif()
endif()
# Settings for RPATH
if(NOT MSVC)
option(ENABLE_RPATH "Enable RPATH installation" TRUE)
mark_as_advanced(ENABLE_RPATH)
endif()
# Dependencies
add_subdirectory(deps)
if(${CMAKE_VERSION} VERSION_GREATER 3.15)
cmake_policy(SET CMP0094 NEW)
endif()
# Enable custom options required to package the CMake project from
# either setup.py or tools like pypa/pip or pypa/build.
set(SCENARIO_CALL_FROM_SETUP_PY FALSE
CACHE BOOL "Configure the project to be compiled from setuptools")
find_package(SWIG 4.0 QUIET)
option(SCENARIO_ENABLE_BINDINGS "Enable SWIG bindings" ${SWIG_FOUND})
# Find Python only if requested or if SWIG was automatically found
if(SCENARIO_ENABLE_BINDINGS OR SCENARIO_CALL_FROM_SETUP_PY)
# Find virtualenv's before system's interpreters
set(Python3_FIND_VIRTUALENV "FIRST" CACHE STRING
"Configure the detection of virtual environments")
set(Python3_FIND_VIRTUALENV_TYPES "FIRST" "ONLY" "STANDARD")
mark_as_advanced(Python3_FIND_VIRTUALENV)
set_property(CACHE Python3_FIND_VIRTUALENV PROPERTY STRINGS ${Python3_FIND_VIRTUALENV_TYPES})
# Find Python3
find_package(Python3 COMPONENTS Interpreter Development REQUIRED)
message(STATUS "Using Python: ${Python3_EXECUTABLE}")
endif()
# Select the appropriate install prefix used throughout the project
set(SCENARIO_INSTALL_BINDIR ${CMAKE_INSTALL_BINDIR})
set(SCENARIO_INSTALL_LIBDIR ${CMAKE_INSTALL_LIBDIR})
set(SCENARIO_INSTALL_INCLUDEDIR ${CMAKE_INSTALL_INCLUDEDIR})
# Adjust RPATH depending on the installation type
if(SCENARIO_ENABLE_BINDINGS AND NOT SCENARIO_CALL_FROM_SETUP_PY)
# Convert folder separators to CMake style (could be necessary in Windows)
file(TO_CMAKE_PATH "${Python3_SITELIB}" python3_sitelib_cleaned)
# Add the libraries installed in the Python site-package folder
set(EXTRA_RPATH_DIRS
"${python3_sitelib_cleaned}"
"${python3_sitelib_cleaned}/scenario/bindings")
unset(python3_sitelib_cleaned)
else()
# Add the libraries installed in the Python site-package folder
# (that in this case is CMAKE_INSTALL_PREFIX)
set(EXTRA_RPATH_DIRS
"${CMAKE_INSTALL_PREFIX}"
"${CMAKE_INSTALL_PREFIX}/scenario/bindings")
endif()
# Configure RPATH
include(AddInstallRPATHSupport)
add_install_rpath_support(
BIN_DIRS
"${CMAKE_INSTALL_PREFIX}/${SCENARIO_INSTALL_BINDIR}"
LIB_DIRS
"${CMAKE_INSTALL_PREFIX}/${SCENARIO_INSTALL_LIBDIR}"
"${CMAKE_INSTALL_PREFIX}/${SCENARIO_INSTALL_LIBDIR}/scenario/plugins"
"${EXTRA_RPATH_DIRS}"
INSTALL_NAME_DIR
"${CMAKE_INSTALL_PREFIX}/${SCENARIO_INSTALL_LIBDIR}"
DEPENDS ENABLE_RPATH
USE_LINK_PATH)
# Find a supported Ignition distribution
if(NOT IGNITION_DISTRIBUTION)
include(FindIgnitionDistribution)
set(SUPPORTED_IGNITION_DISTRIBUTIONS
"Fortress"
"Edifice"
"Dome"
"Citadel")
foreach(distribution IN LISTS SUPPORTED_IGNITION_DISTRIBUTIONS)
find_ignition_distribution(
CODENAME ${distribution}
PACKAGES
ignition-gazebo
REQUIRED FALSE)
if(${${distribution}_FOUND})
message(STATUS "Found Ignition ${distribution}")
# Select Ignition distribution
set(IGNITION_DISTRIBUTION "${distribution}" CACHE
STRING "The Ignition distribution found in the system")
set_property(CACHE IGNITION_DISTRIBUTION PROPERTY
STRINGS ${SUPPORTED_IGNITION_DISTRIBUTIONS})
break()
endif()
endforeach()
endif()
if(NOT IGNITION_DISTRIBUTION OR "${IGNITION_DISTRIBUTION}" STREQUAL "")
set(USE_IGNITION FALSE)
else()
set(USE_IGNITION TRUE)
endif()
option(SCENARIO_USE_IGNITION
"Build C++ code depending on Ignition"
${USE_IGNITION})
# Fail if Ignition is enabled but no compatible distribution was found
if(SCENARIO_USE_IGNITION AND "${IGNITION_DISTRIBUTION}" STREQUAL "")
message(FATAL_ERROR "Failed to find a compatible Ignition Gazebo distribution")
endif()
# Alias the Ignition targets so that we can link against different distributions
if(SCENARIO_USE_IGNITION)
include(ImportTargets${IGNITION_DISTRIBUTION})
endif()
# Helper for exporting targets
include(InstallBasicPackageFiles)
# =========
# SCENARI/O
# =========
add_subdirectory(src)
# ========
# BINDINGS
# ========
# Require to find Ignition libraries when packaging for PyPI
if(SCENARIO_CALL_FROM_SETUP_PY AND NOT USE_IGNITION)
message(FATAL_ERROR "Found no Ignition distribution for PyPI package")
endif()
if(SCENARIO_ENABLE_BINDINGS)
add_subdirectory(bindings)
endif()
# Add uninstall target
include(AddUninstallTarget)
================================================
FILE: scenario/README.md
================================================
# ScenarIO
[](https://isocpp.org)
[][scenario]
[][gym-ignition]
[](https://github.com/robotology/gym-ignition/actions)
[][pypi]
[][pypi]
[][pypi]
[][pypi]
[][pypi]
[pypi]: https://pypi.org/project/scenario/
[gym-ignition]: https://github.com/robotology/gym-ignition
[scenario]: https://github.com/robotology/gym-ignition/tree/master/scenario
**SCEN**e interf**A**ces for **R**obot **I**nput / **O**utput.
||||
|:---:|:---:|:---:|
| ![][pendulum] | ![][panda] | ![][icub] |
[icub]: https://user-images.githubusercontent.com/469199/99262746-9e021a80-281e-11eb-9df1-d70134b0801a.png
[panda]: https://user-images.githubusercontent.com/469199/99263111-0cdf7380-281f-11eb-9cfe-338b2aae0503.png
[pendulum]: https://user-images.githubusercontent.com/469199/99262383-321fb200-281e-11eb-89cc-cc31f590daa3.png
## Description
**ScenarIO** is a C++ abstraction layer to interact with simulated and real robots.
It mainly provides the following
[C++ interfaces](https://github.com/robotology/gym-ignition/tree/master/scenario/core/include/scenario/core):
- `scenario::core::World`
- `scenario::core::Model`
- `scenario::core::Link`
- `scenario::core::Joint`
These interfaces can be implemented to operate on different scenarios,
including robots operating on either simulated worlds or in real-time.
ScenarIO currently fully implements **Gazebo ScenarIO**,
a simulated back-end that interacts with [Ignition Gazebo](https://ignitionrobotics.org).
The result allows stepping the simulator programmatically, ensuring a fully reproducible behaviour.
It relates closely to other projects like
[pybullet](https://github.com/bulletphysics/bullet3) and [mujoco-py](https://github.com/openai/mujoco-py).
A real-time backend that interacts with the [YARP](https://github.com/robotology/yarp) middleware is under development.
ScenarIO can be used either from C++ ([APIs](https://robotology.github.io/gym-ignition/master/breathe/core.html))
or from Python ([APIs](https://robotology.github.io/gym-ignition/master/apidoc/scenario/scenario.bindings.html)).
If you're interested to know the reasons why we started developing ScenarIO and why we selected Ignition Gazebo
for our simulations, visit the _Motivations_ section of the
[website][website].
## Installation
ScenarIO only supports a single distribution of the Ignition suite.
Visit our [Support Policy](https://robotology.github.io/gym-ignition/master/installation/support_policy.html)
to check the distribution currently supported.
Then, install the supported Ignition suite following the
[official instructions](https://ignitionrobotics.org/docs/fortress).
### Python
Execute, preferably in a [virtual environment](https://docs.python.org/3.8/tutorial/venv.html):
```bash
pip install scenario
```
### C++
You can either clone and install the standalone project:
```bash
git clone https://github.com/robotology/gym-ignition
cd gym-ignition/scenario
cmake -S . -B build/
cmake --build build/ --target install
```
or include it in your CMake project with
[`FetchContent`](https://cmake.org/cmake/help/latest/module/FetchContent.html).
## Usage
You can find some examples that show the usage of ScenarIO in the _Getting Started_ section of the
[website][website].
## Contributing
Please visit the _Limitations_ section of the [website][website] and check the
[`good first issue`](https://github.com/robotology/gym-ignition/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
and
[`help wanted`](https://github.com/robotology/gym-ignition/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22)
issues.
You can visit our community forum hosted in [GitHub Discussions](https://github.com/robotology/gym-ignition/discussions).
Even without coding skills, replying to users' questions is a great way of contributing.
If you use ScenarIO in your application and want to show it off, visit the
[Show and tell](https://github.com/robotology/gym-ignition/discussions/categories/show-and-tell) section!
Pull requests are welcome.
For major changes, please open a [discussion](https://github.com/robotology/gym-ignition/discussions)
first to propose what you would like to change.
## Citation
```bibtex
@INPROCEEDINGS{ferigo2020gymignition,
title={Gym-Ignition: Reproducible Robotic Simulations for Reinforcement Learning},
author={D. {Ferigo} and S. {Traversaro} and G. {Metta} and D. {Pucci}},
booktitle={2020 IEEE/SICE International Symposium on System Integration (SII)},
year={2020},
pages={885-890},
doi={10.1109/SII46433.2020.9025951}
}
```
## License
[LGPL v2.1](https://choosealicense.com/licenses/lgpl-2.1/) or any later version.
We vendor some resources from the Ignition code base.
For this reason, Gazebo ScenarIO is double-licensed with the
[Apache License](https://choosealicense.com/licenses/apache-2.0/).
[website]: https://robotology.github.io/gym-ignition
================================================
FILE: scenario/bindings/CMakeLists.txt
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# The SWIG bindings are installed as a standalone Python package.
#
# We create with CMake the following structure in the build tree:
#
# /bindings/scenario/
# ├── bindings
# │ ├── core.py
# │ ├── _core.so
# │ ├── gazebo.py
# │ ├── _gazebo.so
# │ └── __init__.py
# └── __init__.py
#
# That is later installed either in the active virtualenv or packaged
# as a PyPI package. The former is related to the Develop Installation
# and the latter to the User Installation.
#
# Having a working Python package tree in the build folder has multiple
# benefits, including the possibility to use it without installing.
# Beyond this reason, our documentation pipeline requires to import
# a working package from the build tree.
if(${CMAKE_VERSION} VERSION_GREATER 3.13)
cmake_policy(SET CMP0078 NEW)
endif()
if(${CMAKE_VERSION} VERSION_GREATER 3.14)
cmake_policy(SET CMP0086 NEW)
endif()
find_package(SWIG 4.0 REQUIRED)
set(UseSWIG_MODULE_VERSION 2)
include(${SWIG_USE_FILE})
# By default, install ScenarIO in the python site-package directory
if(NOT BINDINGS_INSTALL_PREFIX)
set(BINDINGS_INSTALL_PREFIX ${Python3_SITELIB})
endif()
# Expose the install prefix as CMake option
set(BINDINGS_INSTALL_PREFIX "${BINDINGS_INSTALL_PREFIX}"
CACHE STRING "Installation prefix of the bindings")
# Final directory of the "scenario" package
if(NOT SCENARIO_CALL_FROM_SETUP_PY)
set(SCENARIO_PACKAGE_INSTALL_DIR "${BINDINGS_INSTALL_PREFIX}/scenario")
else()
# If packaging for PyPI, install ScenarIO in the temp site-package directory
# created by either setup.py or pip.
# The "scenario/" folder is added by cmake_build_extension
set(SCENARIO_PACKAGE_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}")
endif()
# Add the SWIG folders
add_subdirectory(core)
if(SCENARIO_USE_IGNITION)
add_subdirectory(gazebo)
endif()
# Move main init.py file to package root of the build tree
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/__init__.py
${CMAKE_CURRENT_BINARY_DIR}/scenario/__init__.py)
# Make scenario.bindings a package
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/scenario/bindings)
file(TOUCH ${CMAKE_CURRENT_BINARY_DIR}/scenario/bindings/__init__.py)
# Move main init.py file to package root of the install tree
install(
FILES ${CMAKE_CURRENT_SOURCE_DIR}/__init__.py
DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}
COMPONENT python)
================================================
FILE: scenario/bindings/__init__.py
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
import os
import sys
from enum import Enum, auto
from pathlib import Path
import packaging.specifiers
import packaging.version
def supported_versions_specifier_set() -> packaging.specifiers.SpecifierSet:
    """
    Return the specifier set of the supported Ignition distribution versions.

    With 6 as the major version of the Ignition distribution, the returned
    specifier is compatible with all the versions of the following forms:
    6.Y.Z.devK, 6.Y.Z.alphaK, 6.Y.Z.betaK, 6.Y.Z.rcK, 6.Y.Z.preK, 6.Y.Z.postK.
    """
    supported_range = ">=6.0.0.pre,<7.0.0.dev"
    return packaging.specifiers.SpecifierSet(supported_range)
class InstallMode(Enum):
    """The way this ScenarIO installation was performed."""

    # NOTE: member order is significant, auto() assigns values 1, 2, 3.
    User = auto()
    CondaBuild = auto()
    Developer = auto()
def detect_install_mode() -> InstallMode:
    """Detect how ScenarIO was installed by inspecting its install prefix."""
    import scenario.bindings.core

    # In conda, there are null bytes terminating the returned string
    prefix = scenario.bindings.core.get_install_prefix().replace("\x00", "")

    # A literal "$PREFIX" is the marker of a conda-build environment
    if "$PREFIX" in prefix:
        return InstallMode.CondaBuild

    # An empty prefix means a relocatable (pip / user) installation
    return InstallMode.User if prefix == "" else InstallMode.Developer
def setup_gazebo_environment() -> None:
    """Export IGN_GAZEBO_SYSTEM_PLUGIN_PATH so Gazebo finds ScenarIO plugins.

    The plugins folder depends on the install mode. Nothing is exported for
    conda-build installations, where the env variable is exported by the conda
    "libscenario" package instead.
    """
    import scenario.bindings.core

    # Detect the install mode once instead of querying the bindings repeatedly
    install_mode = detect_install_mode()

    # Exporting this env variable is done by the conda "libscenario" package
    if install_mode is InstallMode.CondaBuild:
        return

    # Resolve the installation prefix containing the plugins
    if install_mode is InstallMode.Developer:
        install_prefix = scenario.bindings.core.get_install_prefix()
        # In conda, there are null bytes terminating the returned string
        install_prefix = Path(install_prefix.replace("\x00", ""))
    elif install_mode is InstallMode.User:
        install_prefix = Path(os.path.dirname(__file__))
    else:
        raise ValueError(install_mode)

    plugin_dir = install_prefix / "lib" / "scenario" / "plugins"

    # Append to the existing value, avoiding a spurious leading ':' (an empty
    # path element) when the variable was not set
    existing = os.environ.get("IGN_GAZEBO_SYSTEM_PLUGIN_PATH", "")
    if existing:
        os.environ["IGN_GAZEBO_SYSTEM_PLUGIN_PATH"] = f"{existing}:{plugin_dir}"
    else:
        os.environ["IGN_GAZEBO_SYSTEM_PLUGIN_PATH"] = str(plugin_dir)
def preload_tensorflow_shared_libraries() -> None:
    """Dlopen the tensorflow shared libraries, if the package is installed.

    No-op when tensorflow cannot be found. Loading these libraries early
    avoids the segfault that occurs when tensorflow is imported after
    scenario.bindings.gazebo.
    """
    import importlib.util

    # Nothing to do if tensorflow is not installed
    spec = importlib.util.find_spec("tensorflow")
    if spec is None:
        return

    import ctypes
    import pathlib

    # Resolve the tensorflow top-level folder from its __init__ location
    tensorflow_dir = pathlib.Path(spec.origin).parent
    assert tensorflow_dir.is_dir()

    # The tensorflow/python folder contains additional shared libraries
    tensorflow_python_dir = tensorflow_dir / "python"
    assert tensorflow_python_dir.is_dir()

    # First load the main shared library, then everything in tensorflow/python
    for folder, pattern in (
        (tensorflow_dir, "*tensorflow*.so*"),
        (tensorflow_python_dir, "_*.so"),
    ):
        for lib in folder.glob(pattern):
            ctypes.CDLL(str(lib))
def pre_import_gym() -> None:
    """Import gym (if installed) before the gazebo bindings.

    No-op when gym is not available.
    """
    import importlib.util

    # Only import gym when the package can actually be found
    if importlib.util.find_spec("gym") is not None:
        import gym
def check_gazebo_installation() -> None:
    """Check that a compatible Ignition Gazebo installation is available.

    Runs ``ign gazebo --versions`` and verifies that at least one of the
    reported versions matches :func:`supported_versions_specifier_set`.

    Raises:
        RuntimeError: If the 'ign' command is not found, fails, its output
            cannot be parsed, or no compatible Gazebo version is installed.
    """
    import subprocess

    # Define the command before the try block so the handlers can report it
    command = ["ign", "gazebo", "--versions"]

    try:
        result = subprocess.run(command, capture_output=True, text=True, check=True)
    except FileNotFoundError as e:
        msg = "Failed to find the 'ign' command in your PATH. "
        msg += "Make sure that Ignition is installed "
        msg += "and your environment is properly configured."
        raise RuntimeError(msg) from e
    except subprocess.CalledProcessError as e:
        raise RuntimeError(f"Failed to execute command: {' '.join(command)}") from e

    # Strip the command output
    gazebo_versions_string = result.stdout.strip()

    # Get the gazebo version from the command line.
    # Since the releases could be in the "6.0.0~preK" form, we replace '~' with '.' to
    # be compatible with the 'packaging' package.
    gazebo_version_string_normalized = gazebo_versions_string.replace("~", ".")

    # The output could be multiline, listing all the Ignition Gazebo versions found
    gazebo_versions = gazebo_version_string_normalized.split(sep=os.linesep)

    try:
        # Parse the gazebo versions.
        # Catch Exception (not a bare except) so SystemExit / KeyboardInterrupt
        # are not swallowed, and chain the cause for easier debugging.
        gazebo_versions_parsed = [packaging.version.Version(v) for v in gazebo_versions]
    except Exception as e:
        raise RuntimeError(
            f"Failed to parse the output of: {' '.join(command)} ({gazebo_versions})"
        ) from e

    # Accept the installation as soon as one compatible version is found
    for version in gazebo_versions_parsed:
        if version in supported_versions_specifier_set():
            return

    msg = f"Failed to find Ignition Gazebo {supported_versions_specifier_set()} "
    msg += f"(found incompatible version(s): {gazebo_versions_parsed})"
    raise RuntimeError(msg)
def import_gazebo() -> None:
    """Import scenario.bindings.gazebo with RTLD_GLOBAL dlopen flags.

    See https://github.com/robotology/gym-ignition/issues/7 and
    https://stackoverflow.com/a/45473441/12150968 for why the flags are needed.

    Raises:
        ImportError: If the module was already loaded without the custom flags,
            or if importing the bindings fails.
    """
    # Check the the module was never loaded by someone else
    if "scenario.bindings._gazebo" in sys.modules:
        raise ImportError("Failed to load ScenarIO Gazebo with custom dlopen flags")

    # Preload the shared libraries of tensorflow if the package is installed.
    # If tensorflow is imported after scenario.bindings.gazebo, the application segfaults.
    if os.environ.get("SCENARIO_DISABLE_TENSORFLOW_PRELOAD") != "1":
        preload_tensorflow_shared_libraries()

    # Import gym before scenario.bindings.gazebo. Similarly to tensorflow, also gym
    # includes a module that imports protobuf, producing a similar segfault.
    if os.environ.get("SCENARIO_DISABLE_GYM_PREIMPORT") != "1":
        pre_import_gym()

    # Import SWIG bindings
    if sys.platform.startswith("linux") or sys.platform.startswith("darwin"):
        # Update the dlopen flags, and restore them even if the import raises,
        # so a failed import does not leak RTLD_GLOBAL into the process
        dlopen_flags = sys.getdlopenflags()
        sys.setdlopenflags(dlopen_flags | os.RTLD_GLOBAL)
        try:
            import scenario.bindings.gazebo
        finally:
            # Restore the flags
            sys.setdlopenflags(dlopen_flags)
    else:
        import scenario.bindings.gazebo
def create_home_dot_folder() -> None:
    """Ensure that the ~/.ignition/gazebo dot folder exists."""
    dot_folder = Path("~/.ignition/gazebo").expanduser()
    dot_folder.mkdir(mode=0o755, parents=True, exist_ok=True)
# ===================
# Import the bindings
# ===================
# Find the _gazebo.* shared lib.
# The import order below matters: the installation is validated, then the
# SWIG module is loaded with custom dlopen flags, then the dot folder and
# the plugin environment are prepared before re-exporting the module.
if len(list((Path(__file__).parent / "bindings").glob(pattern="_gazebo.*"))) == 1:
    check_gazebo_installation()
    import_gazebo()
    create_home_dot_folder()
    setup_gazebo_environment()
    from .bindings import gazebo
# Find the _yarp.* shared lib (optional component)
if len(list((Path(__file__).parent / "bindings").glob(pattern="_yarp.*"))) == 1:
    from .bindings.yarp import yarp
# The core bindings are always built and exported
from .bindings import core
================================================
FILE: scenario/bindings/core/CMakeLists.txt
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# SWIG bindings for the ScenarIO "core" module
set(scenario_swig_name "core")
set_source_files_properties(${scenario_swig_name}.i PROPERTIES CPLUSPLUS ON)
# The bindings shared library is stored in the Python package of the build tree
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/../scenario/bindings)
swig_add_library(${scenario_swig_name}
TYPE MODULE
LANGUAGE python
OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/../scenario/bindings
OUTFILE_DIR ${CMAKE_CURRENT_BINARY_DIR}/..
SOURCES ${scenario_swig_name}.i)
add_library(ScenarioSwig::Core ALIAS core)
target_link_libraries(${scenario_swig_name} PUBLIC
ScenarioCore::ScenarioABC
ScenarioCore::CoreUtils
Python3::Python)
set_property(TARGET ${scenario_swig_name} PROPERTY
SWIG_USE_TARGET_INCLUDE_DIRECTORIES TRUE)
# -doxygen converts C++ Doxygen comments to Python docstrings
set_property(TARGET ${scenario_swig_name}
PROPERTY SWIG_COMPILE_OPTIONS -doxygen)
# Add the to_gazebo() helpers
if(SCENARIO_USE_IGNITION)
set_property(TARGET ${scenario_swig_name} PROPERTY
SWIG_COMPILE_DEFINITIONS SCENARIO_HAS_GAZEBO)
set_property(TARGET ${scenario_swig_name} PROPERTY
SWIG_DEPENDS "../gazebo/to_gazebo.i")
target_link_libraries(${scenario_swig_name} PUBLIC ScenarioGazebo)
endif()
# Get the core.py wrapper file
get_property(WRAPPER_PY_FILE TARGET ${scenario_swig_name} PROPERTY SWIG_SUPPORT_FILES)
install(
TARGETS ${scenario_swig_name}
COMPONENT python
LIBRARY DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings
ARCHIVE DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings
RUNTIME DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings)
install(
FILES ${WRAPPER_PY_FILE}
DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings
COMPONENT python)
================================================
FILE: scenario/bindings/core/core.i
================================================
// SWIG interface of the ScenarIO core module (scenario.bindings.core)
%module(package="scenario.bindings") core
%{
#define SWIG_FILE_WITH_INIT
#include "scenario/core/Joint.h"
#include "scenario/core/Link.h"
#include "scenario/core/Model.h"
#include "scenario/core/World.h"
#include "scenario/core/utils/utils.h"
%}
%naturalvar;
// Convert all exceptions to RuntimeError
%include "exception.i"
%exception {
try {
$action
} catch (const std::exception& e) {
SWIG_exception(SWIG_RuntimeError, e.what());
}
}
// STL classes
// NOTE(review): the angle-bracketed arguments of the %include and %template
// directives below appear to have been stripped during extraction (e.g.
// <std_string.i>, std::vector<int>) - restore them from version control.
%include
%include
%include
%include
%include
%include
// Convert python list to std::vector
%template(VectorI) std::vector;
%template(VectorU) std::vector;
%template(VectorF) std::vector;
%template(VectorD) std::vector;
%template(VectorS) std::vector;
// Convert python list to std::array
%template(Array3d) std::array;
%template(Array4d) std::array;
%template(Array6d) std::array;
// Pair instantiation
%template(PosePair) std::pair, std::array>;
// ScenarI/O templates
%template(VectorOfLinks) std::vector;
%template(VectorOfJoints) std::vector;
%template(VectorOfContacts) std::vector;
%template(VectorOfContactPoints) std::vector;
// Doxygen typemaps: map C++ container types to Python typing hints
%typemap(doctype) std::array "Tuple[float, float, float]";
%typemap(doctype) std::array "Tuple[float, float, float, float]";
%typemap(doctype) std::array "Tuple[float, float, float, float, float, float]";
%typemap(doctype) std::vector "Tuple[float]";
%typemap(doctype) std::vector "Tuple[string]";
%typemap(doctype) std::vector "Tuple[Link]";
%typemap(doctype) std::vector "Tuple[Joint]";
%typemap(doctype) std::vector "Tuple[Contact]";
%typemap(doctype) std::vector "Tuple[ContactPoint]";
%pythonbegin %{
from typing import Tuple
%}
// NOTE: Keep all template instantiations above.
// Rename all methods to undercase with _ separators excluding the classes.
%rename("%(undercase)s") "";
%rename("") PID;
%rename("") Pose;
%rename("") Link;
%rename("") Joint;
%rename("") Model;
%rename("") World;
%rename("") Limit;
%rename("") Contact;
%rename("") JointType;
%rename("") JointLimit;
%rename("") ContactPoint;
%rename("") JointControlMode;
// Public helpers
%include "scenario/core/utils/utils.h"
// Other templates for ScenarI/O APIs
%shared_ptr(scenario::core::Joint)
%shared_ptr(scenario::core::Link)
%shared_ptr(scenario::core::Model)
%shared_ptr(scenario::core::World)
// ScenarI/O core headers
%include "scenario/core/Joint.h"
%include "scenario/core/Link.h"
%include "scenario/core/Model.h"
%include "scenario/core/World.h"
// Downcast pointers to the implementation classes
#if defined (SCENARIO_HAS_GAZEBO)
%include "../gazebo/to_gazebo.i"
#endif
================================================
FILE: scenario/bindings/gazebo/CMakeLists.txt
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# SWIG bindings for the ScenarIO "gazebo" module
set(scenario_swig_name "gazebo")
set_source_files_properties(${scenario_swig_name}.i PROPERTIES CPLUSPLUS ON)
# The bindings shared library is stored in the Python package of the build tree
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/../scenario/bindings)
swig_add_library(${scenario_swig_name}
TYPE MODULE
LANGUAGE python
OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/../scenario/bindings
OUTFILE_DIR ${CMAKE_CURRENT_BINARY_DIR}/..
SOURCES ${scenario_swig_name}.i)
target_link_libraries(${scenario_swig_name}
PUBLIC
ScenarioGazebo::ScenarioGazebo
ScenarioGazebo::GazeboSimulator
Python3::Python)
add_library(ScenarioSwig::Gazebo ALIAS gazebo)
set_property(TARGET ${scenario_swig_name} PROPERTY
SWIG_USE_TARGET_INCLUDE_DIRECTORIES TRUE)
# -Dfinal works around a SWIG parsing issue with the C++ 'final' specifier:
# https://github.com/swig/swig/issues/672#issuecomment-400577864
set_property(TARGET ${scenario_swig_name} PROPERTY
SWIG_COMPILE_OPTIONS -doxygen -Dfinal)
# Disable SWIG debug code due to the following error:
# int SWIG_Python_ConvertPtrAndOwn(PyObject *, void **, swig_type_info *, int, int *):
# Assertion `own' failed.
# https://github.com/swig/swig/issues/731
# https://github.com/swig/swig/issues/773
target_compile_definitions(${scenario_swig_name} PRIVATE NDEBUG)
# Get the gazebo.py wrapper file
get_property(WRAPPER_PY_FILE TARGET ${scenario_swig_name} PROPERTY SWIG_SUPPORT_FILES)
install(
TARGETS ${scenario_swig_name}
COMPONENT python
LIBRARY DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings
ARCHIVE DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings
RUNTIME DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings)
install(
FILES ${WRAPPER_PY_FILE}
DESTINATION ${SCENARIO_PACKAGE_INSTALL_DIR}/bindings
COMPONENT python)
================================================
FILE: scenario/bindings/gazebo/gazebo.i
================================================
// SWIG interface of the ScenarIO gazebo module (scenario.bindings.gazebo)
%module(package="scenario.bindings") gazebo
%{
#define SWIG_FILE_WITH_INIT
#include "scenario/gazebo/GazeboEntity.h"
#include "scenario/gazebo/GazeboSimulator.h"
#include "scenario/gazebo/Joint.h"
#include "scenario/gazebo/Link.h"
#include "scenario/gazebo/Model.h"
#include "scenario/gazebo/utils.h"
#include "scenario/gazebo/World.h"
#include
%}
%naturalvar;
// Keep templated functions above the %rename directive
// NOTE(review): the template parameter lists below (and the bare #include /
// %include directives) appear to have lost their angle-bracketed text during
// extraction (e.g. template <typename T>, std::shared_ptr<T>) - restore from
// version control.
%inline %{
namespace scenario::gazebo::utils {
template
std::shared_ptr ToGazebo(const std::shared_ptr& base)
{
return std::dynamic_pointer_cast(base);
}
}
%}
// Helpers for downcasting to Gazebo classes
%template(ToGazeboWorld) scenario::gazebo::utils::ToGazebo;
%template(ToGazeboModel) scenario::gazebo::utils::ToGazebo;
%template(ToGazeboJoint) scenario::gazebo::utils::ToGazebo;
%template(ToGazeboLink) scenario::gazebo::utils::ToGazebo;
// STL classes
%include
%include
%include
%include
%include
%include
// Import the module with core classes
// From http://www.swig.org/Doc4.0/Modules.html
%import "../core/core.i"
// NOTE: Keep all template instantiations above.
// Rename all methods to undercase with _ separators excluding the classes.
%rename("%(undercase)s") "";
%rename("") PID;
%rename("") Pose;
%rename("") Link;
%rename("") Joint;
%rename("") Model;
%rename("") World;
%rename("") Limit;
%rename("") Contact;
%rename("") JointType;
%rename("") Verbosity;
%rename("") JointLimit;
%rename("") ContactPoint;
%rename("") GazeboEntity;
%rename("") PhysicsEngine;
%rename("") GazeboSimulator;
%rename("") JointControlMode;
// Other templates for ScenarI/O APIs
%shared_ptr(scenario::gazebo::Joint)
%shared_ptr(scenario::gazebo::Link)
%shared_ptr(scenario::gazebo::Model)
%shared_ptr(scenario::gazebo::World)
%shared_ptr(scenario::gazebo::GazeboEntity)
// Ignored methods: internal ECM-related APIs not exposed to Python
%ignore scenario::gazebo::GazeboEntity::ecm;
%ignore scenario::gazebo::GazeboEntity::initialize;
%ignore scenario::gazebo::GazeboEntity::validEntity;
%ignore scenario::gazebo::GazeboEntity::eventManager;
%ignore scenario::gazebo::GazeboEntity::createECMResources;
// Workaround for https://github.com/swig/swig/issues/1830
%feature("pythonprepend") scenario::gazebo::World::getModel %{
r"""
Get a model part of the world.
:type modelName: string
:param modelName: The name of the model to get.
:rtype: :py:class:`scenario.core.Model`
:return: The model if it is part of the world, None otherwise.
"""
%}
// Interface of Gazebo entities
%include "scenario/gazebo/GazeboEntity.h"
// Public helpers
%include "scenario/gazebo/utils.h"
// ScenarI/O headers
%include "scenario/gazebo/Joint.h"
%include "scenario/gazebo/Link.h"
%include "scenario/gazebo/Model.h"
%include "scenario/gazebo/World.h"
// GazeboSimulator
%include "scenario/gazebo/GazeboSimulator.h"
================================================
FILE: scenario/bindings/gazebo/to_gazebo.i
================================================
// Extends the core classes with a to_gazebo() method that downcasts the
// object to its scenario.bindings.gazebo counterpart (included by core.i
// only when SCENARIO_HAS_GAZEBO is defined).
%pythonbegin %{
from typing import Union
%}
%extend scenario::core::World {
%pythoncode %{
def to_gazebo(self) -> Union["scenario.bindings.gazebo.World", "scenario.bindings.core.World"]:
    import scenario.bindings.gazebo
    return scenario.bindings.gazebo.ToGazeboWorld(self)
%}
}
%extend scenario::core::Model {
%pythoncode %{
def to_gazebo(self) -> Union["scenario.bindings.gazebo.Model", "scenario.bindings.core.Model"]:
    import scenario.bindings.gazebo
    return scenario.bindings.gazebo.ToGazeboModel(self)
%}
}
%extend scenario::core::Joint {
%pythoncode %{
def to_gazebo(self) -> Union["scenario.bindings.gazebo.Joint", "scenario.bindings.core.Joint"]:
    import scenario.bindings.gazebo
    return scenario.bindings.gazebo.ToGazeboJoint(self)
%}
}
%extend scenario::core::Link {
%pythoncode %{
def to_gazebo(self) -> Union["scenario.bindings.gazebo.Link", "scenario.bindings.core.Link"]:
    import scenario.bindings.gazebo
    return scenario.bindings.gazebo.ToGazeboLink(self)
%}
}
================================================
FILE: scenario/cmake/AddUninstallTarget.cmake
================================================
#.rst:
# AddUninstallTarget
# ------------------
#
# Add the "uninstall" target for your project::
#
# include(AddUninstallTarget)
#
#
# will create a file ``cmake_uninstall.cmake`` in the build directory and add a
# custom target ``uninstall`` (or ``UNINSTALL`` on Visual Studio and Xcode) that
# will remove the files installed by your package (using
# ``install_manifest.txt``).
# See also
# https://gitlab.kitware.com/cmake/community/wikis/FAQ#can-i-do-make-uninstall-with-cmake
#
# The :module:`AddUninstallTarget` module must be included in your main
# ``CMakeLists.txt``. If included in a subdirectory it does nothing.
# This allows you to use it safely in your main ``CMakeLists.txt`` and include
# your project using ``add_subdirectory`` (for example when using it with
# :cmake:module:`FetchContent`).
#
# If the ``uninstall`` target already exists, the module does nothing.
#=============================================================================
# Copyright 2008-2013 Kitware, Inc.
# Copyright 2013 Istituto Italiano di Tecnologia (IIT)
# Authors: Daniele E. Domenichelli
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
# AddUninstallTarget works only when included in the main CMakeLists.txt
if(NOT "${CMAKE_CURRENT_BINARY_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
return()
endif()
# The name of the target is uppercase in MSVC and Xcode (for coherence with the
# other standard targets)
if("${CMAKE_GENERATOR}" MATCHES "^(Visual Studio|Xcode)")
set(_uninstall "UNINSTALL")
else()
set(_uninstall "uninstall")
endif()
# If target is already defined don't do anything
if(TARGET ${_uninstall})
return()
endif()
# Generate the uninstall script: it reads install_manifest.txt and removes
# every listed file (in reverse order), honoring the DESTDIR env variable.
set(_filename cmake_uninstall.cmake)
file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/${_filename}"
"if(NOT EXISTS \"${CMAKE_CURRENT_BINARY_DIR}/install_manifest.txt\")
message(WARNING \"Cannot find install manifest: \\\"${CMAKE_CURRENT_BINARY_DIR}/install_manifest.txt\\\"\")
return()
endif()
file(READ \"${CMAKE_CURRENT_BINARY_DIR}/install_manifest.txt\" files)
string(STRIP \"\${files}\" files)
string(REGEX REPLACE \"\\n\" \";\" files \"\${files}\")
list(REVERSE files)
foreach(file \${files})
if(IS_SYMLINK \"\$ENV{DESTDIR}\${file}\" OR EXISTS \"\$ENV{DESTDIR}\${file}\")
message(STATUS \"Uninstalling: \$ENV{DESTDIR}\${file}\")
execute_process(
COMMAND \${CMAKE_COMMAND} -E remove \"\$ENV{DESTDIR}\${file}\"
OUTPUT_VARIABLE rm_out
RESULT_VARIABLE rm_retval)
if(NOT \"\${rm_retval}\" EQUAL 0)
message(FATAL_ERROR \"Problem when removing \\\"\$ENV{DESTDIR}\${file}\\\"\")
endif()
else()
message(STATUS \"Not-found: \$ENV{DESTDIR}\${file}\")
endif()
endforeach(file)
")
set(_desc "Uninstall the project...")
if(CMAKE_GENERATOR STREQUAL "Unix Makefiles")
set(_comment COMMAND \$\(CMAKE_COMMAND\) -E cmake_echo_color --switch=$\(COLOR\) --cyan "${_desc}")
else()
set(_comment COMMENT "${_desc}")
endif()
# The symbolic byproduct keeps the target always out-of-date so it re-runs
add_custom_target(${_uninstall}
${_comment}
COMMAND ${CMAKE_COMMAND} -P ${_filename}
USES_TERMINAL
BYPRODUCTS uninstall_byproduct
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
set_property(SOURCE uninstall_byproduct PROPERTY SYMBOLIC 1)
set_property(TARGET ${_uninstall} PROPERTY FOLDER "CMakePredefinedTargets")
================================================
FILE: scenario/cmake/AliasImportedTarget.cmake
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# Finds PACKAGE_ORIG (optionally with COMPONENTS) and, when found, stores the
# original imported target names NAMESPACE_ORIG::<target> into variables named
# NAMESPACE_DEST.<target>, allowing version-agnostic references to versioned
# Ignition packages. REQUIRED toggles find_package(... REQUIRED) vs QUIET.
# TARGETS_ORIG and TARGETS_DEST must be lists of equal length.
macro(alias_imported_target)
set(_prefix "ait")
string(TOUPPER ${_prefix} _prefix)
set(_oneValueArgs
PACKAGE_ORIG
PACKAGE_DEST
NAMESPACE_ORIG
NAMESPACE_DEST
REQUIRED)
set(_multiValueArgs
COMPONENTS
TARGETS_ORIG
TARGETS_DEST)
set(_options)
# For each {one,multi}value variable FOO, this command creates
# the variable ${_prefix}_FOO with the passed arg(s)
cmake_parse_arguments(${_prefix}
"${_options}"
"${_oneValueArgs}"
"${_multiValueArgs}"
"${ARGN}")
unset(_options)
unset(_oneValueArgs)
unset(_multiValueArgs)
if(${${_prefix}_REQUIRED})
find_package(
${${_prefix}_PACKAGE_ORIG}
COMPONENTS ${${_prefix}_COMPONENTS}
REQUIRED)
else()
find_package(
${${_prefix}_PACKAGE_ORIG}
COMPONENTS ${${_prefix}_COMPONENTS}
QUIET)
endif()
# Example:
# ${${_prefix}_PACKAGE_ORIG} = ign-gazebo3
if(${${_prefix}_PACKAGE_ORIG}_FOUND)
message(DEBUG "Processing package: ${${_prefix}_PACKAGE_ORIG}")
# Check length of lists
list(LENGTH ${_prefix}_TARGETS_ORIG _num_targets_orig)
list(LENGTH ${_prefix}_TARGETS_DEST _num_targets_dest)
math(EXPR _comparison "${_num_targets_orig} - ${_num_targets_dest}"
OUTPUT_FORMAT DECIMAL)
if(${_num_targets_orig} STREQUAL 0)
message(FATAL_ERROR "No targets passed")
endif()
if(NOT ${_comparison} STREQUAL 0)
message(FATAL_ERROR "Number or TARGETS_ elements do not match")
endif()
# Example:
# ${_package_name} = ignition-gazebo3
set(_package_name ${${_prefix}_PACKAGE_ORIG})
# Example:
# ${_variable_name} = ignition-gazebo
set(_variable_name ${${_prefix}_PACKAGE_DEST})
message(DEBUG " Setting: ${_variable_name}=${_package_name}")
set(${_variable_name} ${_package_name})
# TODO Use ZIP_LISTS with CMake > 3.17
math(EXPR _num_targets "${_num_targets_orig} - 1")
foreach(idx RANGE ${_num_targets})
list(GET ${_prefix}_TARGETS_ORIG ${idx} _target_orig)
list(GET ${_prefix}_TARGETS_DEST ${idx} _target_dest)
# Example:
# ${_target_name} = ignition-gazebo3::core
set(_target_name ${${_prefix}_NAMESPACE_ORIG}::${_target_orig})
# Example:
# ${_target_name} = ignition-gazebo.core
set(_variable_name ${${_prefix}_NAMESPACE_DEST}.${_target_dest})
if(NOT TARGET ${_target_name})
message(FATAL_ERROR "Could not find target ${_target_name}")
endif()
message(DEBUG " Setting: ${_variable_name}=${_target_name}")
set(${_variable_name} ${_target_name})
endforeach()
endif()
# Macros have no scope: clean up every helper variable explicitly
unset(_prefix)
unset(_num_targets_orig)
unset(_num_targets_dest)
unset(_comparison)
unset(_package_name)
unset(_num_targets)
unset(_target_orig)
unset(_target_dest)
unset(_target_name)
unset(_variable_name)
endmacro()
================================================
FILE: scenario/cmake/FindIgnitionDistribution.cmake
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# In this initial implementation, we support only specifying
# the Ignition Gazebo version to determine the Ignition distribution.
# In the future we could pull and parse the tags.yaml file.
# Map: major version of ignition-gazebo shipped by each distribution codename.
set(IGNITION-GAZEBO_CITADEL_VER 3)
set(IGNITION-GAZEBO_DOME_VER 4)
set(IGNITION-GAZEBO_EDIFICE_VER 5)
set(IGNITION-GAZEBO_FORTRESS_VER 6)
# Find all the packages of an Ignition distribution selected by codename.
#
# Arguments:
#   CODENAME  Name of the Ignition distribution (e.g. Citadel, Fortress).
#   REQUIRED  If true, each package is searched with find_package(REQUIRED).
#   PACKAGES  List of package basenames (e.g. ignition-gazebo). For each one,
#             a variable <PKG-upper>_<CODENAME-upper>_VER must exist.
#
# Output variables:
#   <CODENAME>_FOUND  TRUE if all the packages of the distribution were found.
#   IGNITION_FOUND    TRUE if the processed distribution was found.
macro(find_ignition_distribution)

    set(_prefix "fid")
    string(TOUPPER ${_prefix} _prefix)

    # Macros do not open a new variable scope: initialize _options explicitly,
    # otherwise a stale value from the caller would leak into
    # cmake_parse_arguments below.
    set(_options)
    set(_oneValueArgs
        CODENAME
        REQUIRED)
    set(_multiValueArgs
        PACKAGES)

    # For each {one,multi}value variable FOO, this command creates
    # the variable ${_prefix}_FOO with the passed arg(s)
    cmake_parse_arguments(${_prefix}
        "${_options}"
        "${_oneValueArgs}"
        "${_multiValueArgs}"
        "${ARGN}")
    unset(_options)
    unset(_oneValueArgs)
    unset(_multiValueArgs)

    # Example:
    # ${Citadel_FOUND}=TRUE
    set(${${_prefix}_CODENAME}_FOUND TRUE)
    set(IGNITION_FOUND FALSE)
    set(_pkgs_not_found)

    # Example:
    # ${PKG}=ignition-gazebo
    foreach(PKG IN LISTS ${_prefix}_PACKAGES)

        # Example:
        # ${_codename_upper}=CITADEL
        string(TOUPPER ${${_prefix}_CODENAME} _codename_upper)

        # Example:
        # ${_pkg_upper}=IGNITION-GAZEBO
        string(TOUPPER ${PKG} _pkg_upper)

        # Example:
        # ${_rel_variable_name}=IGNITION-GAZEBO_CITADEL_VER
        set(_rel_variable_name ${_pkg_upper}_${_codename_upper}_VER)

        if(NOT ${_rel_variable_name})
            message(FATAL_ERROR "Failed to find variable ${_rel_variable_name}")
        endif()

        # Example:
        # ${_pkg_name}=ignition-gazebo3
        set(_pkg_name ${PKG}${${_rel_variable_name}})

        # Find the package
        if(${${_prefix}_REQUIRED})
            find_package(${_pkg_name} REQUIRED)
        else()
            find_package(${_pkg_name} QUIET)
        endif()

        if(NOT ${_pkg_name}_FOUND)
            # Example:
            # ${Citadel_FOUND}=FALSE
            set(${${_prefix}_CODENAME}_FOUND FALSE)
            list(APPEND _pkgs_not_found ${_pkg_name})
        endif()
    endforeach()

    # Print missing packages
    if(NOT ${${_prefix}_CODENAME}_FOUND)
        message(DEBUG "Missing packages of Ignition ${${_prefix}_CODENAME}:")
        foreach(PKG IN LISTS _pkgs_not_found)
            message(DEBUG "  ${PKG}")
        endforeach()
    endif()

    if(${${_prefix}_CODENAME}_FOUND)
        set(IGNITION_FOUND TRUE)
    endif()

    # Macros have no scope: clean up the helper variables explicitly
    unset(_prefix)
    unset(_codename_upper)
    unset(_pkg_upper)
    unset(_rel_variable_name)
    unset(_pkg_name)
    unset(_pkgs_not_found)
endmacro()
================================================
FILE: scenario/cmake/FindPackageHandleStandardArgs.cmake
================================================
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
# file Copyright.txt or https://cmake.org/licensing for details.
#[=======================================================================[.rst:
FindPackageHandleStandardArgs
-----------------------------
This module provides functions intended to be used in :ref:`Find Modules`
implementing :command:`find_package()` calls.
.. command:: find_package_handle_standard_args
This command handles the ``REQUIRED``, ``QUIET`` and version-related
arguments of :command:`find_package`. It also sets the
``_FOUND`` variable. The package is considered found if all
variables listed contain valid results, e.g. valid filepaths.
There are two signatures:
.. code-block:: cmake
find_package_handle_standard_args(<PackageName>
(DEFAULT_MSG|<custom-failure-message>)
<required-var>...
)
find_package_handle_standard_args(<PackageName>
[FOUND_VAR <result-var>]
[REQUIRED_VARS <required-var>...]
[VERSION_VAR <version-var>]
[HANDLE_VERSION_RANGE]
[HANDLE_COMPONENTS]
[CONFIG_MODE]
[NAME_MISMATCHED]
[REASON_FAILURE_MESSAGE <reason-failure-message>]
[FAIL_MESSAGE <custom-failure-message>]
)
The ``_FOUND`` variable will be set to ``TRUE`` if all
the variables ``...`` are valid and any optional
constraints are satisfied, and ``FALSE`` otherwise. A success or
failure message may be displayed based on the results and on
whether the ``REQUIRED`` and/or ``QUIET`` option was given to
the :command:`find_package` call.
The options are:
``(DEFAULT_MSG|)``
In the simple signature this specifies the failure message.
Use ``DEFAULT_MSG`` to ask for a default message to be computed
(recommended). Not valid in the full signature.
``FOUND_VAR ``
.. deprecated:: 3.3
Specifies either ``_FOUND`` or
``_FOUND`` as the result variable. This exists only
for compatibility with older versions of CMake and is now ignored.
Result variables of both names are always set for compatibility.
``REQUIRED_VARS ...``
Specify the variables which are required for this package.
These may be named in the generated failure message asking the
user to set the missing variable values. Therefore these should
typically be cache entries such as ``FOO_LIBRARY`` and not output
variables like ``FOO_LIBRARIES``.
.. versionchanged:: 3.18
If ``HANDLE_COMPONENTS`` is specified, this option can be omitted.
``VERSION_VAR ``
Specify the name of a variable that holds the version of the package
that has been found. This version will be checked against the
(potentially) specified required version given to the
:command:`find_package` call, including its ``EXACT`` option.
The default messages include information about the required
version and the version which has been actually found, both
if the version is ok or not.
``HANDLE_VERSION_RANGE``
.. versionadded:: 3.19
Enable handling of a version range, if one is specified. Without this
option, a developer warning will be displayed if a version range is
specified.
``HANDLE_COMPONENTS``
Enable handling of package components. In this case, the command
will report which components have been found and which are missing,
and the ``_FOUND`` variable will be set to ``FALSE``
if any of the required components (i.e. not the ones listed after
the ``OPTIONAL_COMPONENTS`` option of :command:`find_package`) are
missing.
``CONFIG_MODE``
Specify that the calling find module is a wrapper around a
call to ``find_package( NO_MODULE)``. This implies
a ``VERSION_VAR`` value of ``_VERSION``. The command
will automatically check whether the package configuration file
was found.
``REASON_FAILURE_MESSAGE ``
.. versionadded:: 3.16
Specify a custom message of the reason for the failure which will be
appended to the default generated message.
``FAIL_MESSAGE ``
Specify a custom failure message instead of using the default
generated message. Not recommended.
``NAME_MISMATCHED``
.. versionadded:: 3.17
Indicate that the ```` does not match
``${CMAKE_FIND_PACKAGE_NAME}``. This is usually a mistake and raises a
warning, but it may be intentional for usage of the command for components
of a larger package.
Example for the simple signature:
.. code-block:: cmake
find_package_handle_standard_args(LibXml2 DEFAULT_MSG
LIBXML2_LIBRARY LIBXML2_INCLUDE_DIR)
The ``LibXml2`` package is considered to be found if both
``LIBXML2_LIBRARY`` and ``LIBXML2_INCLUDE_DIR`` are valid.
Then also ``LibXml2_FOUND`` is set to ``TRUE``. If it is not found
and ``REQUIRED`` was used, it fails with a
:command:`message(FATAL_ERROR)`, independent whether ``QUIET`` was
used or not. If it is found, success will be reported, including
the content of the first ````. On repeated CMake runs,
the same message will not be printed again.
.. note::
If ```` does not match ``CMAKE_FIND_PACKAGE_NAME`` for the
calling module, a warning that there is a mismatch is given. The
``FPHSA_NAME_MISMATCHED`` variable may be set to bypass the warning if using
the old signature and the ``NAME_MISMATCHED`` argument using the new
signature. To avoid forcing the caller to require newer versions of CMake for
usage, the variable's value will be used if defined when the
``NAME_MISMATCHED`` argument is not passed for the new signature (but using
both is an error)..
Example for the full signature:
.. code-block:: cmake
find_package_handle_standard_args(LibArchive
REQUIRED_VARS LibArchive_LIBRARY LibArchive_INCLUDE_DIR
VERSION_VAR LibArchive_VERSION)
In this case, the ``LibArchive`` package is considered to be found if
both ``LibArchive_LIBRARY`` and ``LibArchive_INCLUDE_DIR`` are valid.
Also the version of ``LibArchive`` will be checked by using the version
contained in ``LibArchive_VERSION``. Since no ``FAIL_MESSAGE`` is given,
the default messages will be printed.
Another example for the full signature:
.. code-block:: cmake
find_package(Automoc4 QUIET NO_MODULE HINTS /opt/automoc4)
find_package_handle_standard_args(Automoc4 CONFIG_MODE)
In this case, a ``FindAutmoc4.cmake`` module wraps a call to
``find_package(Automoc4 NO_MODULE)`` and adds an additional search
directory for ``automoc4``. Then the call to
``find_package_handle_standard_args`` produces a proper success/failure
message.
.. command:: find_package_check_version
.. versionadded:: 3.19
Helper function which can be used to check if a ```` is valid
against version-related arguments of :command:`find_package`.
.. code-block:: cmake
find_package_check_version(<version> <result-var>
[HANDLE_VERSION_RANGE]
[RESULT_MESSAGE_VARIABLE <message-var>]
)
The ```` will hold a boolean value giving the result of the check.
The options are:
``HANDLE_VERSION_RANGE``
Enable handling of a version range, if one is specified. Without this
option, a developer warning will be displayed if a version range is
specified.
``RESULT_MESSAGE_VARIABLE <message-variable>``
Specify a variable to get back a message describing the result of the check.
Example for the usage:
.. code-block:: cmake
find_package_check_version(1.2.3 result HANDLE_VERSION_RANGE
RESULT_MESSAGE_VARIABLE reason)
if (result)
message (STATUS "${reason}")
else()
message (FATAL_ERROR "${reason}")
endif()
#]=======================================================================]
include(${CMAKE_CURRENT_LIST_DIR}/FindPackageMessage.cmake)
# Keep the policy changes below local to this module.
cmake_policy(PUSH)
# numbers and boolean constants
cmake_policy (SET CMP0012 NEW)
# IN_LIST operator
cmake_policy (SET CMP0057 NEW)
# internal helper macro
# internal helper: report a failure — fatal when the package is REQUIRED,
# a STATUS message otherwise (suppressed under QUIET).
macro(_FPHSA_FAILURE_MESSAGE _msg)
  # Compose the final text, appending any package-supplied reason.
  set(__msg "${_msg}")
  if(FPHSA_REASON_FAILURE_MESSAGE)
    string(APPEND __msg "\n Reason given by package: ${FPHSA_REASON_FAILURE_MESSAGE}\n")
  endif()
  if(${_NAME}_FIND_REQUIRED)
    message(FATAL_ERROR "${__msg}")
  elseif(NOT ${_NAME}_FIND_QUIETLY)
    message(STATUS "${__msg}")
  endif()
endmacro()
# internal helper macro to generate the failure message when used in CONFIG_MODE:
# internal helper macro to generate the failure message when used in CONFIG_MODE.
# Distinguishes three situations: config file found but some REQUIRED_VARS
# missing; config file(s) found but none of an acceptable version; no config
# file found at all.
macro(_FPHSA_HANDLE_FAILURE_CONFIG_MODE)
  # _CONFIG is set, but FOUND is false, this means that some other of the REQUIRED_VARS was not found:
  if(${_NAME}_CONFIG)
    _FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: missing:${MISSING_VARS} (found ${${_NAME}_CONFIG} ${VERSION_MSG})")
  else()
    # If _CONSIDERED_CONFIGS is set, the config-file has been found, but no suitable version.
    # List them all in the error message:
    if(${_NAME}_CONSIDERED_CONFIGS)
      set(configsText "")
      list(LENGTH ${_NAME}_CONSIDERED_CONFIGS configsCount)
      math(EXPR configsCount "${configsCount} - 1")
      foreach(currentConfigIndex RANGE ${configsCount})
        list(GET ${_NAME}_CONSIDERED_CONFIGS ${currentConfigIndex} filename)
        list(GET ${_NAME}_CONSIDERED_VERSIONS ${currentConfigIndex} version)
        # BUGFIX: report the config file actually considered. The previous
        # code printed a literal placeholder here, leaving the 'filename'
        # variable retrieved above unused.
        string(APPEND configsText "\n ${filename} (version ${version})")
      endforeach()
      if (${_NAME}_NOT_FOUND_MESSAGE)
        # Prefer the package-provided not-found explanation, merging it with
        # any reason already given.
        if (FPHSA_REASON_FAILURE_MESSAGE)
          string(PREPEND FPHSA_REASON_FAILURE_MESSAGE "${${_NAME}_NOT_FOUND_MESSAGE}\n ")
        else()
          set(FPHSA_REASON_FAILURE_MESSAGE "${${_NAME}_NOT_FOUND_MESSAGE}")
        endif()
      else()
        string(APPEND configsText "\n")
      endif()
      _FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} ${VERSION_MSG}, checked the following files:${configsText}")
    else()
      # Simple case: No Config-file was found at all:
      _FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: found neither ${_NAME}Config.cmake nor ${_NAME_LOWER}-config.cmake ${VERSION_MSG}")
    endif()
  endif()
endmacro()
# find_package_check_version(<version> <result> [HANDLE_VERSION_RANGE]
#   [NO_AUTHOR_WARNING_VERSION_RANGE] [RESULT_MESSAGE_VARIABLE <var>])
#
# Checks <version> against the version constraints find_package() recorded
# for the current package (exact, minimum, or — with HANDLE_VERSION_RANGE —
# a version range). <result> receives TRUE/FALSE; the optional message
# variable receives a human-readable explanation. Must be called from a Find
# module (directly, or indirectly via find_package_handle_standard_args).
function(FIND_PACKAGE_CHECK_VERSION version result)
  cmake_parse_arguments (PARSE_ARGV 2 FPCV "HANDLE_VERSION_RANGE;NO_AUTHOR_WARNING_VERSION_RANGE" "RESULT_MESSAGE_VARIABLE" "")
  if (FPCV_UNPARSED_ARGUMENTS)
    message (FATAL_ERROR "find_package_check_version(): ${FPCV_UNPARSED_ARGUMENTS}: unexpected arguments")
  endif()
  if ("RESULT_MESSAGE_VARIABLE" IN_LIST FPCV_KEYWORDS_MISSING_VALUES)
    message (FATAL_ERROR "find_package_check_version(): RESULT_MESSAGE_VARIABLE expects an argument")
  endif()
  # Pessimistic defaults in the caller's scope.
  set (${result} FALSE PARENT_SCOPE)
  if (FPCV_RESULT_MESSAGE_VARIABLE)
    unset (${FPCV_RESULT_MESSAGE_VARIABLE} PARENT_SCOPE)
  endif()
  # Determine whose find_package() arguments to inspect: FPHSA propagates the
  # package name via _CMAKE_FPHSA_PACKAGE_NAME; otherwise use the find module.
  if (_CMAKE_FPHSA_PACKAGE_NAME)
    set (package "${_CMAKE_FPHSA_PACKAGE_NAME}")
  elseif (CMAKE_FIND_PACKAGE_NAME)
    set (package "${CMAKE_FIND_PACKAGE_NAME}")
  else()
    message (FATAL_ERROR "find_package_check_version(): Cannot be used outside a 'Find Module'")
  endif()
  if (NOT FPCV_NO_AUTHOR_WARNING_VERSION_RANGE
      AND ${package}_FIND_VERSION_RANGE AND NOT FPCV_HANDLE_VERSION_RANGE)
    # BUGFIX: the message was missing the opening backtick before
    # HANDLE_VERSION_RANGE.
    message(AUTHOR_WARNING
      "`find_package()` specify a version range but the option "
      "`HANDLE_VERSION_RANGE` is not passed to `find_package_check_version()`. "
      "Only the lower endpoint of the range will be used.")
  endif()
  set (version_ok FALSE)
  unset (version_msg)
  if (FPCV_HANDLE_VERSION_RANGE AND ${package}_FIND_VERSION_RANGE)
    # Range check: the lower endpoint is inclusive; the upper endpoint may be
    # inclusive (INCLUDE) or exclusive (EXCLUDE).
    if ((${package}_FIND_VERSION_RANGE_MIN STREQUAL "INCLUDE"
          AND version VERSION_GREATER_EQUAL ${package}_FIND_VERSION_MIN)
        AND ((${package}_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE"
              AND version VERSION_LESS_EQUAL ${package}_FIND_VERSION_MAX)
          OR (${package}_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE"
              AND version VERSION_LESS ${package}_FIND_VERSION_MAX)))
      set (version_ok TRUE)
      set(version_msg "(found suitable version \"${version}\", required range is \"${${package}_FIND_VERSION_RANGE}\")")
    else()
      set(version_msg "Found unsuitable version \"${version}\", required range is \"${${package}_FIND_VERSION_RANGE}\"")
    endif()
  elseif (DEFINED ${package}_FIND_VERSION)
    if(${package}_FIND_VERSION_EXACT) # exact version required
      # count the dots in the version string
      string(REGEX REPLACE "[^.]" "" version_dots "${version}")
      # add one dot because there is one dot more than there are components
      string(LENGTH "${version_dots}." version_dots)
      if (version_dots GREATER ${package}_FIND_VERSION_COUNT)
        # The found version has more components than requested: compare only
        # as many leading components as were requested.
        # Because of the C++ implementation of find_package() ${package}_FIND_VERSION_COUNT
        # is at most 4 here. Therefore a simple lookup table is used.
        if (${package}_FIND_VERSION_COUNT EQUAL 1)
          set(version_regex "[^.]*")
        elseif (${package}_FIND_VERSION_COUNT EQUAL 2)
          set(version_regex "[^.]*\\.[^.]*")
        elseif (${package}_FIND_VERSION_COUNT EQUAL 3)
          set(version_regex "[^.]*\\.[^.]*\\.[^.]*")
        else()
          set(version_regex "[^.]*\\.[^.]*\\.[^.]*\\.[^.]*")
        endif()
        string(REGEX REPLACE "^(${version_regex})\\..*" "\\1" version_head "${version}")
        if (NOT ${package}_FIND_VERSION VERSION_EQUAL version_head)
          set(version_msg "Found unsuitable version \"${version}\", but required is exact version \"${${package}_FIND_VERSION}\"")
        else ()
          set(version_ok TRUE)
          # BUGFIX: report ${version}. The previous code expanded
          # ${_FOUND_VERSION}, a variable this function never sets (it is only
          # defined by find_package_handle_standard_args in the calling scope),
          # so the message was empty when this helper was called directly.
          set(version_msg "(found suitable exact version \"${version}\")")
        endif ()
      else ()
        if (NOT ${package}_FIND_VERSION VERSION_EQUAL version)
          set(version_msg "Found unsuitable version \"${version}\", but required is exact version \"${${package}_FIND_VERSION}\"")
        else ()
          set(version_ok TRUE)
          set(version_msg "(found suitable exact version \"${version}\")")
        endif ()
      endif ()
    else() # minimum version
      if (${package}_FIND_VERSION VERSION_GREATER version)
        set(version_msg "Found unsuitable version \"${version}\", but required is at least \"${${package}_FIND_VERSION}\"")
      else()
        set(version_ok TRUE)
        set(version_msg "(found suitable version \"${version}\", minimum required is \"${${package}_FIND_VERSION}\")")
      endif()
    endif()
  else ()
    # No version was requested: anything is acceptable.
    set(version_ok TRUE)
    set(version_msg "(found version \"${version}\")")
  endif()
  set (${result} ${version_ok} PARENT_SCOPE)
  if (FPCV_RESULT_MESSAGE_VARIABLE)
    set (${FPCV_RESULT_MESSAGE_VARIABLE} "${version_msg}" PARENT_SCOPE)
  endif()
endfunction()
function(FIND_PACKAGE_HANDLE_STANDARD_ARGS _NAME _FIRST_ARG)
# Standard ending of a Find module: validates REQUIRED_VARS, components and
# version constraints, prints the success/failure message, and sets
# ${_NAME}_FOUND plus the upper-case variant in the caller's scope.
# Supports both the old signature (<fail-message> <var>...) and the new
# keyword-based one.
# Set up the arguments for `cmake_parse_arguments`.
set(options CONFIG_MODE HANDLE_COMPONENTS NAME_MISMATCHED HANDLE_VERSION_RANGE)
set(oneValueArgs FAIL_MESSAGE REASON_FAILURE_MESSAGE VERSION_VAR FOUND_VAR)
set(multiValueArgs REQUIRED_VARS)
# Check whether we are in 'simple' or 'extended' mode:
set(_KEYWORDS_FOR_EXTENDED_MODE ${options} ${oneValueArgs} ${multiValueArgs} )
list(FIND _KEYWORDS_FOR_EXTENDED_MODE "${_FIRST_ARG}" INDEX)
unset(FPHSA_NAME_MISMATCHED_override)
if (DEFINED FPHSA_NAME_MISMATCHED)
# If the variable NAME_MISMATCHED variable is set, error if it is passed as
# an argument. The former is for old signatures, the latter is for new
# signatures.
list(FIND ARGN "NAME_MISMATCHED" name_mismatched_idx)
if (NOT name_mismatched_idx EQUAL "-1")
message(FATAL_ERROR
"The `NAME_MISMATCHED` argument may only be specified by the argument or "
"the variable, not both.")
endif ()
# But use the variable if it is not an argument to avoid forcing minimum
# CMake version bumps for calling modules.
set(FPHSA_NAME_MISMATCHED_override "${FPHSA_NAME_MISMATCHED}")
endif ()
# INDEX == -1 -> old 'simple' signature: _FIRST_ARG is the failure message
# and every remaining argument is a required variable.
if(${INDEX} EQUAL -1)
set(FPHSA_FAIL_MESSAGE ${_FIRST_ARG})
set(FPHSA_REQUIRED_VARS ${ARGN})
set(FPHSA_VERSION_VAR)
else()
cmake_parse_arguments(FPHSA "${options}" "${oneValueArgs}" "${multiValueArgs}" ${_FIRST_ARG} ${ARGN})
if(FPHSA_UNPARSED_ARGUMENTS)
message(FATAL_ERROR "Unknown keywords given to FIND_PACKAGE_HANDLE_STANDARD_ARGS(): \"${FPHSA_UNPARSED_ARGUMENTS}\"")
endif()
if(NOT FPHSA_FAIL_MESSAGE)
set(FPHSA_FAIL_MESSAGE "DEFAULT_MSG")
endif()
# In config-mode, we rely on the variable _CONFIG, which is set by find_package()
# when it successfully found the config-file, including version checking:
if(FPHSA_CONFIG_MODE)
list(INSERT FPHSA_REQUIRED_VARS 0 ${_NAME}_CONFIG)
list(REMOVE_DUPLICATES FPHSA_REQUIRED_VARS)
set(FPHSA_VERSION_VAR ${_NAME}_VERSION)
endif()
if(NOT FPHSA_REQUIRED_VARS AND NOT FPHSA_HANDLE_COMPONENTS)
message(FATAL_ERROR "No REQUIRED_VARS specified for FIND_PACKAGE_HANDLE_STANDARD_ARGS()")
endif()
endif()
if (DEFINED FPHSA_NAME_MISMATCHED_override)
set(FPHSA_NAME_MISMATCHED "${FPHSA_NAME_MISMATCHED_override}")
endif ()
# Warn module authors whose module name differs from the find_package() name
# unless the mismatch was explicitly acknowledged.
if (DEFINED CMAKE_FIND_PACKAGE_NAME
AND NOT FPHSA_NAME_MISMATCHED
AND NOT _NAME STREQUAL CMAKE_FIND_PACKAGE_NAME)
message(AUTHOR_WARNING
"The package name passed to `find_package_handle_standard_args` "
"(${_NAME}) does not match the name of the calling package "
"(${CMAKE_FIND_PACKAGE_NAME}). This can lead to problems in calling "
"code that expects `find_package` result variables (e.g., `_FOUND`) "
"to follow a certain pattern.")
endif ()
if (${_NAME}_FIND_VERSION_RANGE AND NOT FPHSA_HANDLE_VERSION_RANGE)
message(AUTHOR_WARNING
"`find_package()` specify a version range but the module ${_NAME} does "
"not support this capability. Only the lower endpoint of the range "
"will be used.")
endif()
# to propagate package name to FIND_PACKAGE_CHECK_VERSION
set(_CMAKE_FPHSA_PACKAGE_NAME "${_NAME}")
# now that we collected all arguments, process them
if("x${FPHSA_FAIL_MESSAGE}" STREQUAL "xDEFAULT_MSG")
set(FPHSA_FAIL_MESSAGE "Could NOT find ${_NAME}")
endif()
# The first required variable's value is displayed in the success message.
if (FPHSA_REQUIRED_VARS)
list(GET FPHSA_REQUIRED_VARS 0 _FIRST_REQUIRED_VAR)
endif()
string(TOUPPER ${_NAME} _NAME_UPPER)
string(TOLOWER ${_NAME} _NAME_LOWER)
# FOUND_VAR may only be the mixed-case or the upper-case spelling.
if(FPHSA_FOUND_VAR)
set(_FOUND_VAR_UPPER ${_NAME_UPPER}_FOUND)
set(_FOUND_VAR_MIXED ${_NAME}_FOUND)
if(FPHSA_FOUND_VAR STREQUAL _FOUND_VAR_MIXED OR FPHSA_FOUND_VAR STREQUAL _FOUND_VAR_UPPER)
set(_FOUND_VAR ${FPHSA_FOUND_VAR})
else()
message(FATAL_ERROR "The argument for FOUND_VAR is \"${FPHSA_FOUND_VAR}\", but only \"${_FOUND_VAR_MIXED}\" and \"${_FOUND_VAR_UPPER}\" are valid names.")
endif()
else()
set(_FOUND_VAR ${_NAME_UPPER}_FOUND)
endif()
# collect all variables which were not found, so they can be printed, so the
# user knows better what went wrong (#6375)
set(MISSING_VARS "")
set(DETAILS "")
# check if all passed variables are valid
set(FPHSA_FOUND_${_NAME} TRUE)
foreach(_CURRENT_VAR ${FPHSA_REQUIRED_VARS})
if(NOT ${_CURRENT_VAR})
set(FPHSA_FOUND_${_NAME} FALSE)
string(APPEND MISSING_VARS " ${_CURRENT_VAR}")
else()
# DETAILS feeds FIND_PACKAGE_MESSAGE's cache so the message is reprinted
# only when any required variable's value changes.
string(APPEND DETAILS "[${${_CURRENT_VAR}}]")
endif()
endforeach()
if(FPHSA_FOUND_${_NAME})
set(${_NAME}_FOUND TRUE)
set(${_NAME_UPPER}_FOUND TRUE)
else()
set(${_NAME}_FOUND FALSE)
set(${_NAME_UPPER}_FOUND FALSE)
endif()
# component handling
unset(FOUND_COMPONENTS_MSG)
unset(MISSING_COMPONENTS_MSG)
if(FPHSA_HANDLE_COMPONENTS)
foreach(comp ${${_NAME}_FIND_COMPONENTS})
if(${_NAME}_${comp}_FOUND)
if(NOT DEFINED FOUND_COMPONENTS_MSG)
set(FOUND_COMPONENTS_MSG "found components:")
endif()
string(APPEND FOUND_COMPONENTS_MSG " ${comp}")
else()
if(NOT DEFINED MISSING_COMPONENTS_MSG)
set(MISSING_COMPONENTS_MSG "missing components:")
endif()
string(APPEND MISSING_COMPONENTS_MSG " ${comp}")
# Only components requested as REQUIRED affect the overall FOUND result.
if(${_NAME}_FIND_REQUIRED_${comp})
set(${_NAME}_FOUND FALSE)
string(APPEND MISSING_VARS " ${comp}")
endif()
endif()
endforeach()
set(COMPONENT_MSG "${FOUND_COMPONENTS_MSG} ${MISSING_COMPONENTS_MSG}")
string(APPEND DETAILS "[c${COMPONENT_MSG}]")
endif()
# version handling:
set(VERSION_MSG "")
set(VERSION_OK TRUE)
# check with DEFINED here as the requested or found version may be "0"
if (DEFINED ${_NAME}_FIND_VERSION)
if(DEFINED ${FPHSA_VERSION_VAR})
set(_FOUND_VERSION ${${FPHSA_VERSION_VAR}})
if (FPHSA_HANDLE_VERSION_RANGE)
set (FPCV_HANDLE_VERSION_RANGE HANDLE_VERSION_RANGE)
else()
set(FPCV_HANDLE_VERSION_RANGE NO_AUTHOR_WARNING_VERSION_RANGE)
endif()
# Delegate the actual comparison to find_package_check_version.
find_package_check_version ("${_FOUND_VERSION}" VERSION_OK RESULT_MESSAGE_VARIABLE VERSION_MSG
${FPCV_HANDLE_VERSION_RANGE})
else()
# if the package was not found, but a version was given, add that to the output:
if(${_NAME}_FIND_VERSION_EXACT)
set(VERSION_MSG "(Required is exact version \"${${_NAME}_FIND_VERSION}\")")
elseif (FPHSA_HANDLE_VERSION_RANGE AND ${_NAME}_FIND_VERSION_RANGE)
set(VERSION_MSG "(Required is version range \"${${_NAME}_FIND_VERSION_RANGE}\")")
else()
set(VERSION_MSG "(Required is at least version \"${${_NAME}_FIND_VERSION}\")")
endif()
endif()
else ()
# Check with DEFINED as the found version may be 0.
if(DEFINED ${FPHSA_VERSION_VAR})
set(VERSION_MSG "(found version \"${${FPHSA_VERSION_VAR}}\")")
endif()
endif ()
if(VERSION_OK)
string(APPEND DETAILS "[v${${FPHSA_VERSION_VAR}}(${${_NAME}_FIND_VERSION})]")
else()
# An unsuitable version makes the whole package not found.
set(${_NAME}_FOUND FALSE)
endif()
# print the result:
if (${_NAME}_FOUND)
FIND_PACKAGE_MESSAGE(${_NAME} "Found ${_NAME}: ${${_FIRST_REQUIRED_VAR}} ${VERSION_MSG} ${COMPONENT_MSG}" "${DETAILS}")
else ()
if(FPHSA_CONFIG_MODE)
_FPHSA_HANDLE_FAILURE_CONFIG_MODE()
else()
if(NOT VERSION_OK)
set(RESULT_MSG)
if (_FIRST_REQUIRED_VAR)
string (APPEND RESULT_MSG "found ${${_FIRST_REQUIRED_VAR}}")
endif()
if (COMPONENT_MSG)
if (RESULT_MSG)
string (APPEND RESULT_MSG ", ")
endif()
string (APPEND RESULT_MSG "${FOUND_COMPONENTS_MSG}")
endif()
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: ${VERSION_MSG} (${RESULT_MSG})")
else()
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} (missing:${MISSING_VARS}) ${VERSION_MSG}")
endif()
endif()
endif ()
# Export the result; function scope would otherwise hide it from the caller.
set(${_NAME}_FOUND ${${_NAME}_FOUND} PARENT_SCOPE)
set(${_NAME_UPPER}_FOUND ${${_NAME}_FOUND} PARENT_SCOPE)
endfunction()
cmake_policy(POP)
================================================
FILE: scenario/cmake/FindPackageMessage.cmake
================================================
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
# file Copyright.txt or https://cmake.org/licensing for details.
#[=======================================================================[.rst:
FindPackageMessage
------------------
.. code-block:: cmake
find_package_message(<name> "message for user" "find result details")
This function is intended to be used in FindXXX.cmake modules files.
It will print a message once for each unique find result. This is
useful for telling the user where a package was found. The first
argument specifies the name (XXX) of the package. The second argument
specifies the message to display. The third argument lists details
about the find result so that if they change the message will be
displayed again. The macro also obeys the QUIET argument to the
find_package command.
Example:
.. code-block:: cmake
if(X11_FOUND)
find_package_message(X11 "Found X11: ${X11_X11_LIB}"
"[${X11_X11_LIB}][${X11_INCLUDE_DIR}]")
else()
...
endif()
#]=======================================================================]
# Print ``msg`` once per unique ``details`` for package ``pkg``; obeys the
# QUIET argument of find_package().
function(find_package_message pkg msg details)
  # Nothing to do when the caller used find_package(... QUIET).
  if(${pkg}_FIND_QUIETLY)
    return()
  endif()
  # Newlines would break the single-line cache entry used for comparison.
  string(REPLACE "\n" "" details "${details}")
  set(cache_var FIND_PACKAGE_MESSAGE_DETAILS_${pkg})
  if("${details}" STREQUAL "${${cache_var}}")
    # Same find result as last run: message was already printed.
    return()
  endif()
  message(STATUS "${msg}")
  # Remember the details so the identical message is not repeated.
  set(${cache_var} "${details}"
      CACHE INTERNAL "Details about finding ${pkg}")
endfunction()
================================================
FILE: scenario/cmake/FindPython/Support.cmake
================================================
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
# file Copyright.txt or https://cmake.org/licensing for details.
#
# This file is a "template" file used by various FindPython modules.
#
#
# Initial configuration
#
cmake_policy(PUSH)
# numbers and boolean constants
cmake_policy (SET CMP0012 NEW)
# IN_LIST operator
cmake_policy (SET CMP0057 NEW)
# Sanity checks: this template must be included from a FindPython* module
# that defines the variable prefix and the required major version.
if (NOT DEFINED _PYTHON_PREFIX)
message (FATAL_ERROR "FindPython: INTERNAL ERROR")
endif()
if (NOT DEFINED _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
message (FATAL_ERROR "FindPython: INTERNAL ERROR")
endif()
# Candidate minor versions for the requested major version, newest first.
if (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR EQUAL "3")
set(_${_PYTHON_PREFIX}_VERSIONS 3.10 3.9 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0)
elseif (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR EQUAL "2")
set(_${_PYTHON_PREFIX}_VERSIONS 2.7 2.6 2.5 2.4 2.3 2.2 2.1 2.0)
else()
message (FATAL_ERROR "FindPython: INTERNAL ERROR")
endif()
get_property(_${_PYTHON_PREFIX}_CMAKE_ROLE GLOBAL PROPERTY CMAKE_ROLE)
include (${CMAKE_CURRENT_LIST_DIR}/../FindPackageHandleStandardArgs.cmake)
#
# helper commands
#
# Report a find failure: fatal for REQUIRED, STATUS otherwise (suppressed
# under QUIET), then clear both spellings of the _FOUND result variable.
macro (_PYTHON_DISPLAY_FAILURE _PYTHON_MSG)
  if (${_PYTHON_PREFIX}_FIND_REQUIRED)
    message (FATAL_ERROR "${_PYTHON_MSG}")
  else()
    if (NOT ${_PYTHON_PREFIX}_FIND_QUIETLY)
      message(STATUS "${_PYTHON_MSG}")
    endif ()
  endif()
  set (${_PYTHON_PREFIX}_FOUND FALSE)
  string (TOUPPER "${_PYTHON_PREFIX}" _${_PYTHON_PREFIX}_UPPER_PREFIX)
  # BUGFIX: the upper-case prefix computed above lives in
  # _${_PYTHON_PREFIX}_UPPER_PREFIX; the previous code dereferenced the
  # never-set _PYTHON_UPPER_PREFIX, so e.g. PYTHON3_FOUND was not cleared.
  set (${_${_PYTHON_PREFIX}_UPPER_PREFIX}_FOUND FALSE)
endmacro()
# Demote every named cache variable to type INTERNAL so it is hidden from
# cmake-gui/ccmake. Variables that are not in the cache are ignored.
function (_PYTHON_MARK_AS_INTERNAL)
  foreach (_pmai_var IN LISTS ARGV)
    if (DEFINED CACHE{${_pmai_var}})
      set_property (CACHE ${_pmai_var} PROPERTY TYPE INTERNAL)
    endif()
  endforeach()
endfunction()
macro (_PYTHON_SELECT_LIBRARY_CONFIGURATIONS _PYTHON_BASENAME)
# Combine <basename>_LIBRARY_RELEASE/_DEBUG into <basename>_LIBRARIES,
# emitting the optimized/debug keywords when both configurations are
# available (mirrors CMake's SelectLibraryConfigurations module).
if(NOT DEFINED ${_PYTHON_BASENAME}_LIBRARY_RELEASE)
set(${_PYTHON_BASENAME}_LIBRARY_RELEASE "${_PYTHON_BASENAME}_LIBRARY_RELEASE-NOTFOUND")
endif()
if(NOT DEFINED ${_PYTHON_BASENAME}_LIBRARY_DEBUG)
set(${_PYTHON_BASENAME}_LIBRARY_DEBUG "${_PYTHON_BASENAME}_LIBRARY_DEBUG-NOTFOUND")
endif()
get_property(_PYTHON_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if (${_PYTHON_BASENAME}_LIBRARY_DEBUG AND ${_PYTHON_BASENAME}_LIBRARY_RELEASE AND
NOT ${_PYTHON_BASENAME}_LIBRARY_DEBUG STREQUAL ${_PYTHON_BASENAME}_LIBRARY_RELEASE AND
(_PYTHON_isMultiConfig OR CMAKE_BUILD_TYPE))
# if the generator is multi-config or if CMAKE_BUILD_TYPE is set for
# single-config generators, set optimized and debug libraries
set (${_PYTHON_BASENAME}_LIBRARIES "")
foreach (_PYTHON_libname IN LISTS ${_PYTHON_BASENAME}_LIBRARY_RELEASE)
list( APPEND ${_PYTHON_BASENAME}_LIBRARIES optimized "${_PYTHON_libname}")
endforeach()
foreach (_PYTHON_libname IN LISTS ${_PYTHON_BASENAME}_LIBRARY_DEBUG)
list( APPEND ${_PYTHON_BASENAME}_LIBRARIES debug "${_PYTHON_libname}")
endforeach()
elseif (${_PYTHON_BASENAME}_LIBRARY_RELEASE)
# Only the release configuration was found.
set (${_PYTHON_BASENAME}_LIBRARIES "${${_PYTHON_BASENAME}_LIBRARY_RELEASE}")
elseif (${_PYTHON_BASENAME}_LIBRARY_DEBUG)
# Only the debug configuration was found.
set (${_PYTHON_BASENAME}_LIBRARIES "${${_PYTHON_BASENAME}_LIBRARY_DEBUG}")
else()
set (${_PYTHON_BASENAME}_LIBRARIES "${_PYTHON_BASENAME}_LIBRARY-NOTFOUND")
endif()
endmacro()
macro (_PYTHON_FIND_FRAMEWORKS)
# Populate _${_PYTHON_PREFIX}_<implementation>_FRAMEWORKS with existing
# framework bundles found in the usual macOS framework search locations.
# No-op on non-Apple hosts/targets.
if (CMAKE_HOST_APPLE OR APPLE)
file(TO_CMAKE_PATH "$ENV{CMAKE_FRAMEWORK_PATH}" _pff_CMAKE_FRAMEWORK_PATH)
set (_pff_frameworks ${CMAKE_FRAMEWORK_PATH}
${_pff_CMAKE_FRAMEWORK_PATH}
~/Library/Frameworks
/usr/local/Frameworks
${CMAKE_SYSTEM_FRAMEWORK_PATH})
list (REMOVE_DUPLICATES _pff_frameworks)
foreach (_pff_implementation IN LISTS _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
unset (_${_PYTHON_PREFIX}_${_pff_implementation}_FRAMEWORKS)
if (_pff_implementation STREQUAL "CPython")
# Accept both the major-versioned (Python3.framework) and the plain
# (Python.framework) bundle names.
foreach (_pff_framework IN LISTS _pff_frameworks)
if (EXISTS ${_pff_framework}/Python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}.framework)
list (APPEND _${_PYTHON_PREFIX}_${_pff_implementation}_FRAMEWORKS ${_pff_framework}/Python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}.framework)
endif()
if (EXISTS ${_pff_framework}/Python.framework)
list (APPEND _${_PYTHON_PREFIX}_${_pff_implementation}_FRAMEWORKS ${_pff_framework}/Python.framework)
endif()
endforeach()
elseif (_pff_implementation STREQUAL "IronPython")
foreach (_pff_framework IN LISTS _pff_frameworks)
if (EXISTS ${_pff_framework}/IronPython.framework)
list (APPEND _${_PYTHON_PREFIX}_${_pff_implementation}_FRAMEWORKS ${_pff_framework}/IronPython.framework)
endif()
endforeach()
endif()
endforeach()
# Macros leak locals into the caller's scope; clean them up explicitly.
unset (_pff_implementation)
unset (_pff_frameworks)
unset (_pff_framework)
endif()
endmacro()
# Return, through the named variable, the existing framework version
# directories matching the requested IMPLEMENTATIONS and VERSION list.
function (_PYTHON_GET_FRAMEWORKS _PYTHON_PGF_FRAMEWORK_PATHS)
  cmake_parse_arguments (PARSE_ARGV 1 _PGF "" "" "IMPLEMENTATIONS;VERSION")
  if (NOT _PGF_IMPLEMENTATIONS)
    # Default to the implementations requested from the find module.
    set (_PGF_IMPLEMENTATIONS ${_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS})
  endif()
  set (collected_paths)
  foreach (impl IN LISTS _PGF_IMPLEMENTATIONS)
    if (impl STREQUAL "CPython")
      # CPython frameworks hold exactly one directory per version.
      foreach (ver IN LISTS _PGF_VERSION)
        foreach (fw IN LISTS _${_PYTHON_PREFIX}_${impl}_FRAMEWORKS)
          set (candidate "${fw}/Versions/${ver}")
          if (EXISTS "${candidate}")
            list (APPEND collected_paths "${candidate}")
          endif()
        endforeach()
      endforeach()
    elseif (impl STREQUAL "IronPython")
      # IronPython: glob every version directory starting with the requested
      # version, newest first.
      foreach (ver IN LISTS _PGF_VERSION)
        foreach (fw IN LISTS _${_PYTHON_PREFIX}_${impl}_FRAMEWORKS)
          file (GLOB matched LIST_DIRECTORIES true RELATIVE "${fw}/Versions/"
                             "${fw}/Versions/${ver}*")
          list (SORT matched ORDER DESCENDING)
          list (TRANSFORM matched PREPEND "${fw}/Versions/")
          list (APPEND collected_paths ${matched})
        endforeach()
      endforeach()
    endif()
  endforeach()
  set (${_PYTHON_PGF_FRAMEWORK_PATHS} ${collected_paths} PARENT_SCOPE)
endfunction()
function (_PYTHON_GET_REGISTRIES _PYTHON_PGR_REGISTRY_PATHS)
# Build the list of Windows registry query strings (PythonCore, Anaconda,
# IronPython) for the requested implementations and versions, and return it
# through the named variable.
cmake_parse_arguments (PARSE_ARGV 1 _PGR "" "" "IMPLEMENTATIONS;VERSION")
if (NOT _PGR_IMPLEMENTATIONS)
set (_PGR_IMPLEMENTATIONS ${_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS})
endif()
set (registries)
foreach (implementation IN LISTS _PGR_IMPLEMENTATIONS)
if (implementation STREQUAL "CPython")
foreach (version IN LISTS _PGR_VERSION)
string (REPLACE "." "" version_no_dots ${version})
# Architecture-tagged per-user entries first.
list (APPEND registries
[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${version}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${version}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath])
if (version VERSION_GREATER_EQUAL "3.5")
# From 3.5 on, only accept the untagged per-user key when its declared
# SysArchitecture matches one of the architectures being searched.
get_filename_component (arch "[HKEY_CURRENT_USER\\Software\\Python\\PythonCore\\${version};SysArchitecture]" NAME)
if (arch MATCHES "(${_${_PYTHON_PREFIX}_ARCH}|${_${_PYTHON_PREFIX}_ARCH2})bit")
list (APPEND registries
[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${version}\\InstallPath])
endif()
else()
list (APPEND registries
[HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${version}\\InstallPath])
endif()
# Anaconda (per-user) and machine-wide entries.
list (APPEND registries
[HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${version_no_dots}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
[HKEY_CURRENT_USER\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${version_no_dots}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${version}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${version}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath]
[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${version}\\InstallPath]
[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${version_no_dots}-${_${_PYTHON_PREFIX}_ARCH}\\InstallPath]
[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\ContinuumAnalytics\\Anaconda${version_no_dots}-${_${_PYTHON_PREFIX}_ARCH2}\\InstallPath])
endforeach()
elseif (implementation STREQUAL "IronPython")
foreach (version IN LISTS _PGR_VERSION)
list (APPEND registries [HKEY_LOCAL_MACHINE\\SOFTWARE\\IronPython\\${version}\\InstallPath])
endforeach()
endif()
endforeach()
set (${_PYTHON_PGR_REGISTRY_PATHS} "${registries}" PARENT_SCOPE)
endfunction()
# Compute the candidate ABI suffix list ("d" = pydebug, "m" = pymalloc,
# "u" = wide unicode) from the 3-element _${_PYTHON_PREFIX}_FIND_ABI list
# (<pydebug>;<pymalloc>;<unicode>), each entry being ON/OFF/ANY, and return
# it through the named variable.
function (_PYTHON_GET_ABIFLAGS _PGABIFLAGS)
  set (abiflags)
  list (GET _${_PYTHON_PREFIX}_FIND_ABI 0 pydebug)
  list (GET _${_PYTHON_PREFIX}_FIND_ABI 1 pymalloc)
  list (GET _${_PYTHON_PREFIX}_FIND_ABI 2 unicode)
  # Enumerate the "m"/"u" combinations allowed by pymalloc/unicode; "d" is
  # folded in afterwards from pydebug. OFF/OFF intentionally yields only "".
  if (pymalloc STREQUAL "ANY" AND unicode STREQUAL "ANY")
    set (abiflags "mu" "m" "u" "")
  elseif (pymalloc STREQUAL "ANY" AND unicode STREQUAL "ON")
    set (abiflags "mu" "u")
  elseif (pymalloc STREQUAL "ANY" AND unicode STREQUAL "OFF")
    set (abiflags "m" "")
  elseif (pymalloc STREQUAL "ON" AND unicode STREQUAL "ANY")
    set (abiflags "mu" "m")
  elseif (pymalloc STREQUAL "ON" AND unicode STREQUAL "ON")
    set (abiflags "mu")
  elseif (pymalloc STREQUAL "ON" AND unicode STREQUAL "OFF")
    set (abiflags "m")
  elseif (pymalloc STREQUAL "OFF" AND unicode STREQUAL "ANY")
    # BUGFIX: this branch previously repeated the ON/ANY condition above,
    # making it unreachable — pymalloc=OFF with unicode=ANY fell through and
    # produced no flags instead of "u" and "".
    set (abiflags "u" "")
  elseif (pymalloc STREQUAL "OFF" AND unicode STREQUAL "ON")
    set (abiflags "u")
  endif()
  if (pydebug STREQUAL "ON")
    # Debug required: prefix every candidate with "d".
    if (abiflags)
      list (TRANSFORM abiflags PREPEND "d")
    else()
      set (abiflags "d")
    endif()
  elseif (pydebug STREQUAL "ANY")
    # Debug optional: accept both the "d"-prefixed and plain candidates.
    if (abiflags)
      set (flags "${abiflags}")
      list (TRANSFORM flags PREPEND "d")
      list (APPEND abiflags "${flags}")
    else()
      set (abiflags "" "d")
    endif()
  endif()
  set (${_PGABIFLAGS} "${abiflags}" PARENT_SCOPE)
endfunction()
function (_PYTHON_GET_PATH_SUFFIXES _PYTHON_PGPS_PATH_SUFFIXES)
# Build the PATH_SUFFIXES list used by find_program/find_library/find_path
# for the requested artifact kind (INTERPRETER/COMPILER/LIBRARY/INCLUDE),
# implementations and versions; returned through the named variable.
cmake_parse_arguments (PARSE_ARGV 1 _PGPS "INTERPRETER;COMPILER;LIBRARY;INCLUDE" "" "IMPLEMENTATIONS;VERSION")
if (NOT _PGPS_IMPLEMENTATIONS)
set (_PGPS_IMPLEMENTATIONS ${_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS})
endif()
# Candidate ABI suffixes for config/include directory names.
if (DEFINED _${_PYTHON_PREFIX}_ABIFLAGS)
set (abi "${_${_PYTHON_PREFIX}_ABIFLAGS}")
else()
set (abi "mu" "m" "u" "")
endif()
set (path_suffixes)
foreach (implementation IN LISTS _PGPS_IMPLEMENTATIONS)
if (implementation STREQUAL "CPython")
if (_PGPS_INTERPRETER)
list (APPEND path_suffixes bin Scripts)
else()
foreach (version IN LISTS _PGPS_VERSION)
if (_PGPS_LIBRARY)
if (CMAKE_LIBRARY_ARCHITECTURE)
list (APPEND path_suffixes lib/${CMAKE_LIBRARY_ARCHITECTURE})
endif()
list (APPEND path_suffixes lib libs)
# NOTE(review): the suffixes below expand the whole _PGPS_VERSION list
# rather than the current loop variable ${version}; this is only correct
# when a single VERSION is passed — confirm against the callers.
if (CMAKE_LIBRARY_ARCHITECTURE)
set (suffixes "${abi}")
if (suffixes)
list (TRANSFORM suffixes PREPEND "lib/python${_PGPS_VERSION}/config-${_PGPS_VERSION}")
list (TRANSFORM suffixes APPEND "-${CMAKE_LIBRARY_ARCHITECTURE}")
else()
set (suffixes "lib/python${_PGPS_VERSION}/config-${_PGPS_VERSION}-${CMAKE_LIBRARY_ARCHITECTURE}")
endif()
list (APPEND path_suffixes ${suffixes})
endif()
set (suffixes "${abi}")
if (suffixes)
list (TRANSFORM suffixes PREPEND "lib/python${_PGPS_VERSION}/config-${_PGPS_VERSION}")
else()
set (suffixes "lib/python${_PGPS_VERSION}/config-${_PGPS_VERSION}")
endif()
list (APPEND path_suffixes ${suffixes})
elseif (_PGPS_INCLUDE)
set (suffixes "${abi}")
if (suffixes)
list (TRANSFORM suffixes PREPEND "include/python${_PGPS_VERSION}")
else()
set (suffixes "include/python${_PGPS_VERSION}")
endif()
list (APPEND path_suffixes ${suffixes} include)
endif()
endforeach()
endif()
elseif (implementation STREQUAL "IronPython")
if (_PGPS_INTERPRETER OR _PGPS_COMPILER)
foreach (version IN LISTS _PGPS_VERSION)
list (APPEND path_suffixes "share/ironpython${version}")
endforeach()
list (APPEND path_suffixes ${_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES})
endif()
elseif (implementation STREQUAL "PyPy")
if (_PGPS_INTERPRETER)
list (APPEND path_suffixes ${_${_PYTHON_PREFIX}_PYPY_EXECUTABLE_PATH_SUFFIXES})
elseif (_PGPS_LIBRARY)
list (APPEND path_suffixes ${_${_PYTHON_PREFIX}_PYPY_LIBRARY_PATH_SUFFIXES})
elseif (_PGPS_INCLUDE)
list (APPEND path_suffixes ${_${_PYTHON_PREFIX}_PYPY_INCLUDE_PATH_SUFFIXES})
endif()
endif()
endforeach()
list (REMOVE_DUPLICATES path_suffixes)
set (${_PYTHON_PGPS_PATH_SUFFIXES} ${path_suffixes} PARENT_SCOPE)
endfunction()
function (_PYTHON_GET_NAMES _PYTHON_PGN_NAMES)
# Build the candidate file names (NAMES argument for find_program /
# find_library) for the requested artifact kind, platform naming convention
# (POSIX/WIN32), implementations and versions; returned through the named
# variable.
cmake_parse_arguments (PARSE_ARGV 1 _PGN "POSIX;INTERPRETER;COMPILER;CONFIG;LIBRARY;WIN32;DEBUG" "" "IMPLEMENTATIONS;VERSION")
if (NOT _PGN_IMPLEMENTATIONS)
set (_PGN_IMPLEMENTATIONS ${_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS})
endif()
set (names)
foreach (implementation IN LISTS _PGN_IMPLEMENTATIONS)
if (implementation STREQUAL "CPython")
# Unversioned interpreter names may be searched before or after the
# versioned ones, controlled by FIND_UNVERSIONED_NAMES.
if (_PGN_INTERPRETER AND _${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES STREQUAL "FIRST")
list (APPEND names python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR} python)
endif()
foreach (version IN LISTS _PGN_VERSION)
if (_PGN_WIN32)
# Windows convention: python<Mm>[_d]
string (REPLACE "." "" version_no_dots ${version})
set (name python${version_no_dots})
if (_PGN_DEBUG)
string (APPEND name "_d")
endif()
list (APPEND names "${name}")
endif()
if (_PGN_POSIX)
# POSIX convention: python<M.m>[abiflags][-config], optionally prefixed
# with the library architecture for -config scripts.
if (DEFINED _${_PYTHON_PREFIX}_ABIFLAGS)
set (abi "${_${_PYTHON_PREFIX}_ABIFLAGS}")
else()
if (_PGN_INTERPRETER OR _PGN_CONFIG)
set (abi "")
else()
set (abi "mu" "m" "u" "")
endif()
endif()
if (abi)
if (_PGN_CONFIG AND DEFINED CMAKE_LIBRARY_ARCHITECTURE)
set (abinames "${abi}")
list (TRANSFORM abinames PREPEND "${CMAKE_LIBRARY_ARCHITECTURE}-python${version}")
list (TRANSFORM abinames APPEND "-config")
list (APPEND names ${abinames})
endif()
set (abinames "${abi}")
list (TRANSFORM abinames PREPEND "python${version}")
if (_PGN_CONFIG)
list (TRANSFORM abinames APPEND "-config")
endif()
list (APPEND names ${abinames})
else()
unset (abinames)
if (_PGN_CONFIG AND DEFINED CMAKE_LIBRARY_ARCHITECTURE)
set (abinames "${CMAKE_LIBRARY_ARCHITECTURE}-python${version}")
endif()
list (APPEND abinames "python${version}")
if (_PGN_CONFIG)
list (TRANSFORM abinames APPEND "-config")
endif()
list (APPEND names ${abinames})
endif()
endif()
endforeach()
if (_PGN_INTERPRETER AND _${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES STREQUAL "LAST")
list (APPEND names python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR} python)
endif()
elseif (implementation STREQUAL "IronPython")
if (_PGN_INTERPRETER)
if (NOT CMAKE_SYSTEM_NAME STREQUAL "Linux")
# Do not use wrapper script on Linux because it is buggy: -c interpreter option cannot be used
foreach (version IN LISTS _PGN_VERSION)
list (APPEND names "ipy${version}")
endforeach()
endif()
list (APPEND names ${_${_PYTHON_PREFIX}_IRON_PYTHON_INTERPRETER_NAMES})
elseif (_PGN_COMPILER)
list (APPEND names ${_${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_NAMES})
endif()
elseif (implementation STREQUAL "PyPy")
if (_PGN_INTERPRETER)
list (APPEND names ${_${_PYTHON_PREFIX}_PYPY_NAMES})
elseif (_PGN_LIBRARY)
if (_PGN_WIN32)
foreach (version IN LISTS _PGN_VERSION)
string (REPLACE "." "" version_no_dots ${version})
set (name "python${version_no_dots}")
if (_PGN_DEBUG)
string (APPEND name "_d")
endif()
list (APPEND names "${name}")
endforeach()
endif()
list (APPEND names ${_${_PYTHON_PREFIX}_PYPY_LIB_NAMES})
endif()
endif()
endforeach()
set (${_PYTHON_PGN_NAMES} ${names} PARENT_SCOPE)
endfunction()
# Query a python configuration value and return it in the variable named by
# _PYTHON_PGCV_VALUE. NAME selects the value and must be one of PREFIX,
# ABIFLAGS, CONFIGDIR, INCLUDES, LIBS or SOABI. Two sources are consulted:
# the python-config tool first, then the interpreter itself (via
# distutils/sysconfig) when not cross-compiling; interpreter results
# overwrite python-config results.
function (_PYTHON_GET_CONFIG_VAR _PYTHON_PGCV_VALUE NAME)
unset (${_PYTHON_PGCV_VALUE} PARENT_SCOPE)
# Reject unsupported configuration names.
if (NOT NAME MATCHES "^(PREFIX|ABIFLAGS|CONFIGDIR|INCLUDES|LIBS|SOABI)$")
return()
endif()
# First source: the python-config helper tool, when one was found.
if (_${_PYTHON_PREFIX}_CONFIG)
# SOABI has no dedicated flag; it is derived from --extension-suffix.
if (NAME STREQUAL "SOABI")
set (config_flag "--extension-suffix")
else()
set (config_flag "--${NAME}")
endif()
string (TOLOWER "${config_flag}" config_flag)
execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" ${config_flag}
RESULT_VARIABLE _result
OUTPUT_VARIABLE _values
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_result)
unset (_values)
else()
if (NAME STREQUAL "INCLUDES")
# do some clean-up: keep only the -I/-iwithsysroot paths, stripped of flags
string (REGEX MATCHALL "(-I|-iwithsysroot)[ ]*[^ ]+" _values "${_values}")
string (REGEX REPLACE "(-I|-iwithsysroot)[ ]*" "" _values "${_values}")
list (REMOVE_DUPLICATES _values)
elseif (NAME STREQUAL "SOABI")
# clean-up: remove prefix character and suffix
if (_values MATCHES "^(\\.${CMAKE_SHARED_LIBRARY_SUFFIX}|\\.so|\\.pyd)$")
set(_values "")
else()
string (REGEX REPLACE "^[.-](.+)(${CMAKE_SHARED_LIBRARY_SUFFIX}|\\.(so|pyd))$" "\\1" _values "${_values}")
endif()
endif()
endif()
endif()
# Second source: ask the interpreter directly. Skipped when cross-compiling
# because the interpreter would describe the host, not the target. Each
# query falls back from distutils.sysconfig to sysconfig inside the script.
if (_${_PYTHON_PREFIX}_EXECUTABLE AND NOT CMAKE_CROSSCOMPILING)
if (NAME STREQUAL "PREFIX")
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c "import sys\ntry:\n from distutils import sysconfig\n sys.stdout.write(';'.join([sysconfig.PREFIX,sysconfig.EXEC_PREFIX,sysconfig.BASE_EXEC_PREFIX]))\nexcept Exception:\n import sysconfig\n sys.stdout.write(';'.join([sysconfig.get_config_var('base') or '', sysconfig.get_config_var('installed_base') or '']))"
RESULT_VARIABLE _result
OUTPUT_VARIABLE _values
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_result)
unset (_values)
else()
list (REMOVE_DUPLICATES _values)
endif()
elseif (NAME STREQUAL "INCLUDES")
# Pick the sysconfig install scheme matching the platform.
if (WIN32)
set (_scheme "nt")
else()
set (_scheme "posix_prefix")
endif()
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys\ntry:\n from distutils import sysconfig\n sys.stdout.write(';'.join([sysconfig.get_python_inc(plat_specific=True),sysconfig.get_python_inc(plat_specific=False)]))\nexcept Exception:\n import sysconfig\n sys.stdout.write(';'.join([sysconfig.get_path('platinclude'),sysconfig.get_path('platinclude','${_scheme}'),sysconfig.get_path('include'),sysconfig.get_path('include','${_scheme}')]))"
RESULT_VARIABLE _result
OUTPUT_VARIABLE _values
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_result)
unset (_values)
else()
list (REMOVE_DUPLICATES _values)
endif()
elseif (NAME STREQUAL "SOABI")
# Query SOABI, EXT_SUFFIX and SO in that order and keep the first
# non-empty value.
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys\ntry:\n from distutils import sysconfig\n sys.stdout.write(';'.join([sysconfig.get_config_var('SOABI') or '',sysconfig.get_config_var('EXT_SUFFIX') or '',sysconfig.get_config_var('SO') or '']))\nexcept Exception:\n import sysconfig;sys.stdout.write(';'.join([sysconfig.get_config_var('SOABI') or '',sysconfig.get_config_var('EXT_SUFFIX') or '',sysconfig.get_config_var('SO') or '']))"
RESULT_VARIABLE _result
OUTPUT_VARIABLE _soabi
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_result)
unset (_values)
else()
foreach (_item IN LISTS _soabi)
if (_item)
set (_values "${_item}")
break()
endif()
endforeach()
if (_values)
# clean-up: remove prefix character and suffix
if (_values MATCHES "^(\\.${CMAKE_SHARED_LIBRARY_SUFFIX}|\\.so|\\.pyd)$")
set(_values "")
else()
string (REGEX REPLACE "^[.-](.+)(${CMAKE_SHARED_LIBRARY_SUFFIX}|\\.(so|pyd))$" "\\1" _values "${_values}")
endif()
endif()
endif()
else()
# Remaining names (ABIFLAGS, CONFIGDIR, LIBS) map directly to a
# sysconfig configuration variable; CONFIGDIR is named LIBPL there.
set (config_flag "${NAME}")
if (NAME STREQUAL "CONFIGDIR")
set (config_flag "LIBPL")
endif()
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys\ntry:\n from distutils import sysconfig\n sys.stdout.write(sysconfig.get_config_var('${config_flag}'))\nexcept Exception:\n import sysconfig\n sys.stdout.write(sysconfig.get_config_var('${config_flag}'))"
RESULT_VARIABLE _result
OUTPUT_VARIABLE _values
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_result)
unset (_values)
endif()
endif()
endif()
# ABIFLAGS and SOABI may legitimately be empty, so propagate them as-is.
if (NAME STREQUAL "ABIFLAGS" OR NAME STREQUAL "SOABI")
set (${_PYTHON_PGCV_VALUE} "${_values}" PARENT_SCOPE)
return()
endif()
# For the other names an empty or "None" result means "no value".
if (NOT _values OR _values STREQUAL "None")
return()
endif()
if (NAME STREQUAL "LIBS")
# do some clean-up: keep only -l/-framework entries
string (REGEX MATCHALL "-(l|framework)[ ]*[^ ]+" _values "${_values}")
# remove elements relative to python library itself
list (FILTER _values EXCLUDE REGEX "-lpython")
list (REMOVE_DUPLICATES _values)
endif()
# Path-valued results are normalized to CMake-style paths on Windows.
if (WIN32 AND NAME MATCHES "^(PREFIX|CONFIGDIR|INCLUDES)$")
file (TO_CMAKE_PATH "${_values}" _values)
endif()
set (${_PYTHON_PGCV_VALUE} "${_values}" PARENT_SCOPE)
endfunction()
# Extract the python version — and, where possible, the ABI flags — either
# from the library file name (LIBRARY) or from the patchlevel.h/pyconfig.h
# headers (INCLUDE). Results are returned in the caller's scope as
# <PREFIX>VERSION[_MAJOR|_MINOR|_PATCH] and <PREFIX>ABI; all of them are
# unset first so values that cannot be determined stay undefined.
function (_PYTHON_GET_VERSION)
cmake_parse_arguments (PARSE_ARGV 0 _PGV "LIBRARY;INCLUDE" "PREFIX" "")
unset (${_PGV_PREFIX}VERSION PARENT_SCOPE)
unset (${_PGV_PREFIX}VERSION_MAJOR PARENT_SCOPE)
unset (${_PGV_PREFIX}VERSION_MINOR PARENT_SCOPE)
unset (${_PGV_PREFIX}VERSION_PATCH PARENT_SCOPE)
unset (${_PGV_PREFIX}ABI PARENT_SCOPE)
if (_PGV_LIBRARY)
# retrieve version and abi from library name
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE)
get_filename_component (library_name "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}" NAME)
# extract version from library name
# Windows-style name, e.g. python38: no dot and no ABI flags.
if (library_name MATCHES "python([23])([0-9]+)")
set (${_PGV_PREFIX}VERSION_MAJOR "${CMAKE_MATCH_1}" PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION_MINOR "${CMAKE_MATCH_2}" PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}" PARENT_SCOPE)
set (${_PGV_PREFIX}ABI "" PARENT_SCOPE)
# POSIX-style name, e.g. python3.8m: dotted version plus optional
# d/m/u ABI flag letters.
elseif (library_name MATCHES "python([23])\\.([0-9]+)([dmu]*)")
set (${_PGV_PREFIX}VERSION_MAJOR "${CMAKE_MATCH_1}" PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION_MINOR "${CMAKE_MATCH_2}" PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}" PARENT_SCOPE)
set (${_PGV_PREFIX}ABI "${CMAKE_MATCH_3}" PARENT_SCOPE)
# PyPy library (pypy-c / pypy3-c): only the major version is encoded.
elseif (library_name MATCHES "pypy(3)?-c")
set (version "${CMAKE_MATCH_1}")
if (version EQUAL "3")
set (${_PGV_PREFIX}VERSION_MAJOR "3" PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION "3" PARENT_SCOPE)
else()
set (${_PGV_PREFIX}VERSION_MAJOR "2" PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION "2" PARENT_SCOPE)
endif()
set (${_PGV_PREFIX}ABI "" PARENT_SCOPE)
endif()
endif()
else()
if (_${_PYTHON_PREFIX}_INCLUDE_DIR)
# retrieve version from header file
# PY_VERSION in patchlevel.h holds the full "major.minor.patch" string.
file (STRINGS "${_${_PYTHON_PREFIX}_INCLUDE_DIR}/patchlevel.h" version
REGEX "^#define[ \t]+PY_VERSION[ \t]+\"[^\"]+\"")
string (REGEX REPLACE "^#define[ \t]+PY_VERSION[ \t]+\"([^\"]+)\".*" "\\1"
version "${version}")
string (REGEX MATCHALL "[0-9]+" versions "${version}")
list (GET versions 0 version_major)
list (GET versions 1 version_minor)
list (GET versions 2 version_patch)
set (${_PGV_PREFIX}VERSION "${version_major}.${version_minor}.${version_patch}" PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION_MAJOR ${version_major} PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION_MINOR ${version_minor} PARENT_SCOPE)
set (${_PGV_PREFIX}VERSION_PATCH ${version_patch} PARENT_SCOPE)
# compute ABI flags
if (version_major VERSION_GREATER "2")
file (STRINGS "${_${_PYTHON_PREFIX}_INCLUDE_DIR}/pyconfig.h" config REGEX "(Py_DEBUG|WITH_PYMALLOC|Py_UNICODE_SIZE|MS_WIN32)")
set (abi)
if (config MATCHES "#[ ]*define[ ]+MS_WIN32")
# ABI not used on Windows
# NOTE(review): in this branch <PREFIX>ABI is left unset in the
# caller — the PARENT_SCOPE set below sits inside the else()
# branch only. Confirm this asymmetry is intended.
set (abi "")
else()
if (NOT config)
# pyconfig.h can be a wrapper to a platform specific pyconfig.h
# In this case, try to identify ABI from include directory
if (_${_PYTHON_PREFIX}_INCLUDE_DIR MATCHES "python${version_major}\\.${version_minor}+([dmu]*)")
set (abi "${CMAKE_MATCH_1}")
else()
set (abi "")
endif()
else()
# Assemble the flag string from the macros found in pyconfig.h:
# d = debug build, m = pymalloc, u = wide unicode.
if (config MATCHES "#[ ]*define[ ]+Py_DEBUG[ ]+1")
string (APPEND abi "d")
endif()
if (config MATCHES "#[ ]*define[ ]+WITH_PYMALLOC[ ]+1")
string (APPEND abi "m")
endif()
if (config MATCHES "#[ ]*define[ ]+Py_UNICODE_SIZE[ ]+4")
string (APPEND abi "u")
endif()
endif()
set (${_PGV_PREFIX}ABI "${abi}" PARENT_SCOPE)
endif()
else()
# ABI not supported
set (${_PGV_PREFIX}ABI "" PARENT_SCOPE)
endif()
endif()
endif()
endfunction()
# Determine the launcher needed to run the interpreter (INTERPRETER) or the
# compiler (COMPILER) and return it in the variable named by
# _PYTHON_PGL_NAME. Only IronPython artifacts (.exe assemblies executed
# through the configured .NET launcher) need one; in every other case the
# result variable is left unset in the caller's scope.
function (_PYTHON_GET_LAUNCHER _PYTHON_PGL_NAME)
  cmake_parse_arguments (PARSE_ARGV 1 _PGL "INTERPRETER;COMPILER" "" "")

  # BUGFIX: was 'unset ({_PYTHON_PGL_NAME} PARENT_SCOPE)' — the missing '$'
  # unset a variable literally named '{_PYTHON_PGL_NAME}', so a stale
  # launcher value could survive in the caller's scope.
  unset (${_PYTHON_PGL_NAME} PARENT_SCOPE)

  # Nothing to compute when the requested artifact has not been found.
  if ((_PGL_INTERPRETER AND NOT _${_PYTHON_PREFIX}_EXECUTABLE)
      OR (_PGL_COMPILER AND NOT _${_PYTHON_PREFIX}_COMPILER))
    return()
  endif()

  # BUGFIX: the guard tested the undefined variable SYSTEM_NAME — always
  # empty, making the condition unconditionally true. The intended variable
  # is CMAKE_SYSTEM_NAME, restricting the launcher to platforms other than
  # Windows and Linux.
  if ("IronPython" IN_LIST _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS
      AND NOT CMAKE_SYSTEM_NAME MATCHES "Windows|Linux")
    if (_PGL_INTERPRETER)
      get_filename_component (name "${_${_PYTHON_PREFIX}_EXECUTABLE}" NAME)
      get_filename_component (ext "${_${_PYTHON_PREFIX}_EXECUTABLE}" LAST_EXT)
      # Only a genuine IronPython assembly (known name with a .exe
      # extension) is run through the .NET launcher.
      if (name IN_LIST _${_PYTHON_PREFIX}_IRON_PYTHON_INTERPRETER_NAMES
          AND ext STREQUAL ".exe")
        set (${_PYTHON_PGL_NAME} "${${_PYTHON_PREFIX}_DOTNET_LAUNCHER}" PARENT_SCOPE)
      endif()
    else()
      get_filename_component (name "${_${_PYTHON_PREFIX}_COMPILER}" NAME)
      get_filename_component (ext "${_${_PYTHON_PREFIX}_COMPILER}" LAST_EXT)
      if (name IN_LIST _${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_NAMES
          AND ext STREQUAL ".exe")
        set (${_PYTHON_PGL_NAME} "${${_PYTHON_PREFIX}_DOTNET_LAUNCHER}" PARENT_SCOPE)
      endif()
    endif()
  endif()
endfunction()
# Validate the cached interpreter _<prefix>_EXECUTABLE against the requested
# constraints: existence (CHECK_EXISTS), ABI flags, exact/minimum VERSION or
# version range (IN_RANGE), and — when a Development component is requested —
# the pointer size of the build environment. On any failure a reason is
# recorded in <prefix>_Interpreter_REASON_FAILURE and the cache entry is
# reset to NOTFOUND so a subsequent search can try another candidate.
function (_PYTHON_VALIDATE_INTERPRETER)
if (NOT _${_PYTHON_PREFIX}_EXECUTABLE)
return()
endif()
cmake_parse_arguments (PARSE_ARGV 0 _PVI "IN_RANGE;EXACT;CHECK_EXISTS" "VERSION" "")
if (_PVI_CHECK_EXISTS AND NOT EXISTS "${_${_PYTHON_PREFIX}_EXECUTABLE}")
# interpreter does not exist anymore
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Cannot find the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
endif()
# Launcher (possibly empty) needed to execute the interpreter.
_python_get_launcher (launcher INTERPRETER)
# validate ABI compatibility
if (DEFINED _${_PYTHON_PREFIX}_FIND_ABI)
execute_process (COMMAND ${launcher} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys; sys.stdout.write(sys.abiflags)"
RESULT_VARIABLE result
OUTPUT_VARIABLE abi
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (result)
# assume ABI is not supported
set (abi "")
endif()
if (NOT abi IN_LIST _${_PYTHON_PREFIX}_ABIFLAGS)
# incompatible ABI
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Wrong ABI for the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
endif()
endif()
if (_PVI_IN_RANGE OR _PVI_VERSION)
# retrieve full version
execute_process (COMMAND ${launcher} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:3]]))"
RESULT_VARIABLE result
OUTPUT_VARIABLE version
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (result)
# interpreter is not usable
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Cannot use the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
endif()
if (_PVI_VERSION)
# check against specified version
## compute number of components for version
string (REGEX REPLACE "[^.]" "" dots "${_PVI_VERSION}")
## add one dot because there is one dot less than there are components
string (LENGTH "${dots}." count)
if (count GREATER 3)
set (count 3)
endif()
# Build a regex keeping only as many components as were requested, so
# that e.g. a request for "3.8" is compared against "3.8", not "3.8.10".
set (version_regex "^[0-9]+")
if (count EQUAL 3)
string (APPEND version_regex "\\.[0-9]+\\.[0-9]+")
elseif (count EQUAL 2)
string (APPEND version_regex "\\.[0-9]+")
endif()
# extract needed range
string (REGEX MATCH "${version_regex}" version "${version}")
if (_PVI_EXACT AND NOT version VERSION_EQUAL _PVI_VERSION)
# interpreter has wrong version
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Wrong version for the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
else()
# check that version is OK: same major version and at least the
# requested version
string(REGEX REPLACE "^([0-9]+)\\.?.*$" "\\1" major_version "${version}")
string(REGEX REPLACE "^([0-9]+)\\.?.*$" "\\1" expected_major_version "${_PVI_VERSION}")
if (NOT major_version VERSION_EQUAL expected_major_version
OR NOT version VERSION_GREATER_EQUAL _PVI_VERSION)
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Wrong version for the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
endif()
endif()
endif()
if (_PVI_IN_RANGE)
# check if version is in the requested range
find_package_check_version ("${version}" in_range HANDLE_VERSION_RANGE)
if (NOT in_range)
# interpreter has invalid version
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Wrong version for the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
endif()
endif()
else()
# No explicit version requested: only enforce the required major version.
# A "pythonN" executable name is trusted without running the interpreter.
get_filename_component (python_name "${_${_PYTHON_PREFIX}_EXECUTABLE}" NAME)
if (NOT python_name STREQUAL "python${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}${CMAKE_EXECUTABLE_SUFFIX}")
# executable found do not have version in name
# ensure major version is OK
execute_process (COMMAND ${launcher} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys; sys.stdout.write(str(sys.version_info[0]))"
RESULT_VARIABLE result
OUTPUT_VARIABLE version
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (result OR NOT version EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
# interpreter not usable or has wrong major version
if (result)
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Cannot use the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
else()
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Wrong major version for the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
endif()
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
endif()
endif()
endif()
if (CMAKE_SIZEOF_VOID_P AND ("Development.Module" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
OR "Development.Embed" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
AND NOT CMAKE_CROSSCOMPILING)
# In this case, interpreter must have same architecture as environment
# (struct.calcsize("P") is the interpreter's pointer size in bytes).
execute_process (COMMAND ${launcher} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys, struct; sys.stdout.write(str(struct.calcsize(\"P\")))"
RESULT_VARIABLE result
OUTPUT_VARIABLE size
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (result OR NOT size EQUAL CMAKE_SIZEOF_VOID_P)
# interpreter not usable or has wrong architecture
if (result)
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Cannot use the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
else()
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Wrong architecture for the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"" PARENT_SCOPE)
endif()
set_property (CACHE _${_PYTHON_PREFIX}_EXECUTABLE PROPERTY VALUE "${_PYTHON_PREFIX}_EXECUTABLE-NOTFOUND")
return()
endif()
endif()
endfunction()
# Validate the cached compiler _<prefix>_COMPILER (IronPython ipyc) against
# the requested constraints. The targeted python version is determined by
# compiling a tiny script that prints sys.version_info and executing the
# resulting version.exe. On any failure a reason is recorded in
# <prefix>_Compiler_REASON_FAILURE and the cache entry is reset to NOTFOUND.
function (_PYTHON_VALIDATE_COMPILER)
if (NOT _${_PYTHON_PREFIX}_COMPILER)
return()
endif()
cmake_parse_arguments (PARSE_ARGV 0 _PVC "IN_RANGE;EXACT;CHECK_EXISTS" "VERSION" "")
if (_PVC_CHECK_EXISTS AND NOT EXISTS "${_${_PYTHON_PREFIX}_COMPILER}")
# Compiler does not exist anymore
set (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE "Cannot find the compiler \"${_${_PYTHON_PREFIX}_COMPILER}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_COMPILER PROPERTY VALUE "${_PYTHON_PREFIX}_COMPILER-NOTFOUND")
return()
endif()
# Launcher (possibly empty) needed to execute the compiler.
_python_get_launcher (launcher COMPILER)
# retrieve python environment version from compiler
set (working_dir "${CMAKE_CURRENT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PythonCompilerVersion.dir")
file (WRITE "${working_dir}/version.py" "import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:3]]))\n")
# Compile the probe script into a self-contained version.exe.
execute_process (COMMAND ${launcher} "${_${_PYTHON_PREFIX}_COMPILER}"
${_${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_ARCH_FLAGS}
/target:exe /embed "${working_dir}/version.py"
WORKING_DIRECTORY "${working_dir}"
OUTPUT_QUIET
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
# Run the produced executable with MONO_PATH pointing at the compiler's
# directory so its assemblies can be resolved.
get_filename_component (ir_dir "${_${_PYTHON_PREFIX}_COMPILER}" DIRECTORY)
execute_process (COMMAND "${CMAKE_COMMAND}" -E env "MONO_PATH=${ir_dir}"
${${_PYTHON_PREFIX}_DOTNET_LAUNCHER} "${working_dir}/version.exe"
WORKING_DIRECTORY "${working_dir}"
RESULT_VARIABLE result
OUTPUT_VARIABLE version
ERROR_QUIET)
file (REMOVE_RECURSE "${working_dir}")
if (result)
# compiler is not usable
set (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE "Cannot use the compiler \"${_${_PYTHON_PREFIX}_COMPILER}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_COMPILER PROPERTY VALUE "${_PYTHON_PREFIX}_COMPILER-NOTFOUND")
return()
endif()
if (_PVC_VERSION OR _PVC_IN_RANGE)
if (_PVC_VERSION)
# check against specified version
## compute number of components for version
string (REGEX REPLACE "[^.]" "" dots "${_PVC_VERSION}")
## add one dot because there is one dot less than there are components
string (LENGTH "${dots}." count)
if (count GREATER 3)
set (count 3)
endif()
# Keep only as many version components as were requested.
set (version_regex "^[0-9]+")
if (count EQUAL 3)
string (APPEND version_regex "\\.[0-9]+\\.[0-9]+")
elseif (count EQUAL 2)
string (APPEND version_regex "\\.[0-9]+")
endif()
# extract needed range
string (REGEX MATCH "${version_regex}" version "${version}")
if (_PVC_EXACT AND NOT version VERSION_EQUAL _PVC_VERSION)
# interpreter has wrong version
set (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE "Wrong version for the compiler \"${_${_PYTHON_PREFIX}_COMPILER}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_COMPILER PROPERTY VALUE "${_PYTHON_PREFIX}_COMPILER-NOTFOUND")
return()
else()
# check that version is OK: same major version and at least the
# requested version
string(REGEX REPLACE "^([0-9]+)\\.?.*$" "\\1" major_version "${version}")
string(REGEX REPLACE "^([0-9]+)\\.?.*$" "\\1" expected_major_version "${_PVC_VERSION}")
if (NOT major_version VERSION_EQUAL expected_major_version
OR NOT version VERSION_GREATER_EQUAL _PVC_VERSION)
set (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE "Wrong version for the compiler \"${_${_PYTHON_PREFIX}_COMPILER}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_COMPILER PROPERTY VALUE "${_PYTHON_PREFIX}_COMPILER-NOTFOUND")
return()
endif()
endif()
endif()
if (_PVC_IN_RANGE)
# check if version is in the requested range
find_package_check_version ("${version}" in_range HANDLE_VERSION_RANGE)
if (NOT in_range)
# interpreter has invalid version
set (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE "Wrong version for the compiler \"${_${_PYTHON_PREFIX}_COMPILER}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_COMPILER PROPERTY VALUE "${_PYTHON_PREFIX}_COMPILER-NOTFOUND")
return()
endif()
endif()
else()
# No explicit version requested: only enforce the required major version.
string(REGEX REPLACE "^([0-9]+)\\.?.*$" "\\1" major_version "${version}")
if (NOT major_version EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
# Compiler has wrong major version
set (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE "Wrong major version for the compiler \"${_${_PYTHON_PREFIX}_COMPILER}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_COMPILER PROPERTY VALUE "${_PYTHON_PREFIX}_COMPILER-NOTFOUND")
return()
endif()
endif()
endfunction()
# Validate the cached library _<prefix>_LIBRARY_RELEASE against the requested
# constraints (existence, ABI flags parsed from its name, version or version
# range). On failure a reason is recorded in <prefix>_Development_REASON_FAILURE
# and the library cache entries — plus the dependent runtime-library and
# include-dir entries — are invalidated so a new search can occur.
function (_PYTHON_VALIDATE_LIBRARY)
if (NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
unset (_${_PYTHON_PREFIX}_LIBRARY_DEBUG)
return()
endif()
cmake_parse_arguments (PARSE_ARGV 0 _PVL "IN_RANGE;EXACT;CHECK_EXISTS" "VERSION" "")
if (_PVL_CHECK_EXISTS AND NOT EXISTS "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}")
# library does not exist anymore
set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Cannot find the library \"${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_RELEASE PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_RELEASE-NOTFOUND")
if (WIN32)
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_DEBUG PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_DEBUG-NOTFOUND")
endif()
set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
return()
endif()
# retrieve version and abi from library name
_python_get_version (LIBRARY PREFIX lib_)
if (DEFINED _${_PYTHON_PREFIX}_FIND_ABI AND NOT lib_ABI IN_LIST _${_PYTHON_PREFIX}_ABIFLAGS)
# incompatible ABI
set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong ABI for the library \"${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_RELEASE PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_RELEASE-NOTFOUND")
else()
if (_PVL_VERSION OR _PVL_IN_RANGE)
if (_PVL_VERSION)
# library have only major.minor information
# so truncate the requested version to at most two components
string (REGEX MATCH "[0-9](\\.[0-9]+)?" version "${_PVL_VERSION}")
if ((_PVL_EXACT AND NOT lib_VERSION VERSION_EQUAL version) OR (lib_VERSION VERSION_LESS version))
# library has wrong version
set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong version for the library \"${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_RELEASE PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_RELEASE-NOTFOUND")
endif()
endif()
# Only run the range check if the version check above did not already
# invalidate the cache entry.
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE AND _PVL_IN_RANGE)
# check if library version is in the requested range
find_package_check_version ("${lib_VERSION}" in_range HANDLE_VERSION_RANGE)
if (NOT in_range)
# library has wrong version
set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong version for the library \"${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_RELEASE PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_RELEASE-NOTFOUND")
endif()
endif()
else()
if (NOT lib_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
# library has wrong major version
set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong major version for the library \"${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}\"" PARENT_SCOPE)
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_RELEASE PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_RELEASE-NOTFOUND")
endif()
endif()
endif()
# If the release library was invalidated, also invalidate the artifacts
# that depend on it (debug library on Windows, runtime libs, include dir).
if (NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
if (WIN32)
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_DEBUG PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_DEBUG-NOTFOUND")
endif()
unset (_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE CACHE)
unset (_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG CACHE)
set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
endif()
endfunction()
# Validate the cached include directory _<prefix>_INCLUDE_DIR against the
# requested constraints (existence, ABI flags and version parsed from its
# headers, version range). On failure a reason is recorded in
# <prefix>_Development_REASON_FAILURE and the cache entry is reset to
# NOTFOUND so a new search can occur.
function (_PYTHON_VALIDATE_INCLUDE_DIR)
  if (NOT _${_PYTHON_PREFIX}_INCLUDE_DIR)
    return()
  endif()

  cmake_parse_arguments (PARSE_ARGV 0 _PVID "IN_RANGE;EXACT;CHECK_EXISTS" "VERSION" "")

  if (_PVID_CHECK_EXISTS AND NOT EXISTS "${_${_PYTHON_PREFIX}_INCLUDE_DIR}")
    # include file does not exist anymore
    set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Cannot find the directory \"${_${_PYTHON_PREFIX}_INCLUDE_DIR}\"" PARENT_SCOPE)
    set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
    return()
  endif()

  # retrieve version from header file
  _python_get_version (INCLUDE PREFIX inc_)

  if (DEFINED _${_PYTHON_PREFIX}_FIND_ABI AND NOT inc_ABI IN_LIST _${_PYTHON_PREFIX}_ABIFLAGS)
    # incompatible ABI
    set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong ABI for the directory \"${_${_PYTHON_PREFIX}_INCLUDE_DIR}\"" PARENT_SCOPE)
    set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
  else()
    if (_PVID_VERSION OR _PVID_IN_RANGE)
      if (_PVID_VERSION)
        # BUGFIX: this compared against the undefined variable
        # 'expected_version', making the check a no-op; the requested
        # version is held in _PVID_VERSION.
        if ((_PVID_EXACT AND NOT inc_VERSION VERSION_EQUAL _PVID_VERSION) OR (inc_VERSION VERSION_LESS _PVID_VERSION))
          # include dir has wrong version
          set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong version for the directory \"${_${_PYTHON_PREFIX}_INCLUDE_DIR}\"" PARENT_SCOPE)
          set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
        endif()
      endif()
      # BUGFIX: the option flag was spelled 'PVID_IN_RANGE', missing the
      # leading underscore of the _PVID prefix used by
      # cmake_parse_arguments, so the range check never ran.
      if (_${_PYTHON_PREFIX}_INCLUDE_DIR AND _PVID_IN_RANGE)
        # check if include dir is in the request range
        find_package_check_version ("${inc_VERSION}" in_range HANDLE_VERSION_RANGE)
        if (NOT in_range)
          # include dir has wrong version
          set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong version for the directory \"${_${_PYTHON_PREFIX}_INCLUDE_DIR}\"" PARENT_SCOPE)
          set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
        endif()
      endif()
    else()
      # No explicit version requested: only enforce the required major version.
      if (NOT inc_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
        # include dir has wrong major version
        set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Wrong major version for the directory \"${_${_PYTHON_PREFIX}_INCLUDE_DIR}\"" PARENT_SCOPE)
        set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
      endif()
    endif()
  endif()
endfunction()
# Locate the runtime part (shared library) of a python library on platforms
# that split libraries into an import part and a runtime part. All
# arguments are forwarded verbatim to find_library() through ARGV.
function (_PYTHON_FIND_RUNTIME_LIBRARY _PYTHON_LIB)
  # Recover the base cache variable name by stripping the "_RUNTIME" tag.
  string (REPLACE "_RUNTIME" "" _PYTHON_LIB "${_PYTHON_LIB}")
  # A separate runtime part exists on Windows, or on MSYS/CYGWIN when the
  # already-found library is an import library.
  set (_PYTHON_HAS_RUNTIME_PART FALSE)
  if (CMAKE_SYSTEM_NAME STREQUAL "Windows")
    set (_PYTHON_HAS_RUNTIME_PART TRUE)
  elseif (CMAKE_SYSTEM_NAME MATCHES "MSYS|CYGWIN"
          AND ${_PYTHON_LIB} MATCHES "${CMAKE_IMPORT_LIBRARY_SUFFIX}$")
    set (_PYTHON_HAS_RUNTIME_PART TRUE)
  endif()
  if (NOT _PYTHON_HAS_RUNTIME_PART)
    return()
  endif()
  # Search only for shared libraries; the change is local to this function.
  set (CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_SHARED_LIBRARY_SUFFIX})
  if (CMAKE_SYSTEM_NAME MATCHES "MSYS")
    # MSYS has a special syntax for runtime libraries
    list (APPEND CMAKE_FIND_LIBRARY_PREFIXES "msys-")
  endif()
  find_library (${ARGV})
endfunction()
# Collect the parent directories of the library paths stored in the variables
# named by the remaining arguments (ARGN), de-duplicate them, and return the
# list in the caller's scope under the name given by _PYTHON_SLD_RESULT.
function (_PYTHON_SET_LIBRARY_DIRS _PYTHON_SLD_RESULT)
  set (_PYTHON_COLLECTED_DIRS)
  foreach (_PYTHON_LIB_VAR IN LISTS ARGN)
    # Skip variables that are unset or hold a NOTFOUND value.
    if (${_PYTHON_LIB_VAR})
      get_filename_component (_PYTHON_LIB_DIR "${${_PYTHON_LIB_VAR}}" DIRECTORY)
      list (APPEND _PYTHON_COLLECTED_DIRS "${_PYTHON_LIB_DIR}")
    endif()
  endforeach()
  list (REMOVE_DUPLICATES _PYTHON_COLLECTED_DIRS)
  set (${_PYTHON_SLD_RESULT} ${_PYTHON_COLLECTED_DIRS} PARENT_SCOPE)
endfunction()
# Set <prefix>_Development.<module>_FOUND in the caller's scope, based on
# whether every artifact (LIBRARY, INCLUDE_DIR) required by this Development
# sub-component was located.
function (_PYTHON_SET_DEVELOPMENT_MODULE_FOUND module)
  if (NOT "Development.${module}" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
    # component was not requested: leave the _FOUND variable untouched
    return()
  endif()
  string (TOUPPER "${module}" id)
  # assume success, then invalidate as soon as a required artifact is missing
  set (_PYTHON_SDMF_FOUND TRUE)
  if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS
      AND NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
    set (_PYTHON_SDMF_FOUND FALSE)
  endif()
  if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS
      AND NOT _${_PYTHON_PREFIX}_INCLUDE_DIR)
    set (_PYTHON_SDMF_FOUND FALSE)
  endif()
  set (${_PYTHON_PREFIX}_Development.${module}_FOUND ${_PYTHON_SDMF_FOUND} PARENT_SCOPE)
endfunction()
# Reject user-requested versions that cannot match this module's internal
# major version: fail early with an explanatory message instead of running
# a search that is guaranteed to fail.
if (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
# range must include internal major version
if (${_PYTHON_PREFIX}_FIND_VERSION_MIN_MAJOR VERSION_GREATER _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR
OR ((${_PYTHON_PREFIX}_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE"
AND ${_PYTHON_PREFIX}_FIND_VERSION_MAX VERSION_LESS _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
OR (${_PYTHON_PREFIX}_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE"
AND ${_PYTHON_PREFIX}_FIND_VERSION_MAX VERSION_LESS_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)))
_python_display_failure ("Could NOT find ${_PYTHON_PREFIX}: Wrong version range specified is \"${${_PYTHON_PREFIX}_FIND_VERSION_RANGE}\", but expected version range must include major version \"${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}\"")
# balances the policy PUSH done earlier in the module (not visible in this chunk)
cmake_policy(POP)
return()
endif()
else()
if (DEFINED ${_PYTHON_PREFIX}_FIND_VERSION_MAJOR
AND NOT ${_PYTHON_PREFIX}_FIND_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
# If major version is specified, it must be the same as internal major version
_python_display_failure ("Could NOT find ${_PYTHON_PREFIX}: Wrong major version specified is \"${${_PYTHON_PREFIX}_FIND_VERSION_MAJOR}\", but expected major version is \"${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}\"")
cmake_policy(POP)
return()
endif()
endif()
# handle components
# Default to the Interpreter component when the caller requested none, and
# expand meta-components: NumPy implies Interpreter + Development.Module,
# and Development is shorthand for Development.Module + Development.Embed.
if (NOT ${_PYTHON_PREFIX}_FIND_COMPONENTS)
set (${_PYTHON_PREFIX}_FIND_COMPONENTS Interpreter)
set (${_PYTHON_PREFIX}_FIND_REQUIRED_Interpreter TRUE)
endif()
if ("NumPy" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
list (APPEND ${_PYTHON_PREFIX}_FIND_COMPONENTS "Interpreter" "Development.Module")
endif()
if ("Development" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
list (APPEND ${_PYTHON_PREFIX}_FIND_COMPONENTS "Development.Module" "Development.Embed")
endif()
list (REMOVE_DUPLICATES ${_PYTHON_PREFIX}_FIND_COMPONENTS)
# start from a known state: every component is "not found" until proven otherwise
foreach (_${_PYTHON_PREFIX}_COMPONENT IN ITEMS Interpreter Compiler Development Development.Module Development.Embed NumPy)
set (${_PYTHON_PREFIX}_${_${_PYTHON_PREFIX}_COMPONENT}_FOUND FALSE)
endforeach()
# a REQUIRED Development meta-component makes both sub-components required
if (${_PYTHON_PREFIX}_FIND_REQUIRED_Development)
set (${_PYTHON_PREFIX}_FIND_REQUIRED_Development.Module TRUE)
set (${_PYTHON_PREFIX}_FIND_REQUIRED_Development.Embed TRUE)
endif()
# Compute which artifacts (LIBRARY, INCLUDE_DIR) each Development
# sub-component must locate.  On Windows-like platforms extension modules
# also link against the Python library, so LIBRARY is required there too.
unset (_${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
unset (_${_PYTHON_PREFIX}_FIND_DEVELOPMENT_MODULE_ARTIFACTS)
unset (_${_PYTHON_PREFIX}_FIND_DEVELOPMENT_EMBED_ARTIFACTS)
if ("Development.Module" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
if (CMAKE_SYSTEM_NAME MATCHES "^(Windows.*|CYGWIN|MSYS)$")
list (APPEND _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_MODULE_ARTIFACTS "LIBRARY")
endif()
list (APPEND _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_MODULE_ARTIFACTS "INCLUDE_DIR")
endif()
if ("Development.Embed" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
list (APPEND _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_EMBED_ARTIFACTS "LIBRARY" "INCLUDE_DIR")
endif()
set (_${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS ${_${_PYTHON_PREFIX}_FIND_DEVELOPMENT_MODULE_ARTIFACTS} ${_${_PYTHON_PREFIX}_FIND_DEVELOPMENT_EMBED_ARTIFACTS})
list (REMOVE_DUPLICATES _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
# Set versions to search
## default: search any version
set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${_${_PYTHON_PREFIX}_VERSIONS})
unset (_${_PYTHON_PREFIX}_FIND_VERSION_EXACT)
if (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
# keep only the known versions falling inside the requested range
unset (_${_PYTHON_PREFIX}_FIND_VERSIONS)
foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_VERSIONS)
if ((${_PYTHON_PREFIX}_FIND_VERSION_RANGE_MIN STREQUAL "INCLUDE"
AND _${_PYTHON_PREFIX}_VERSION VERSION_GREATER_EQUAL ${_PYTHON_PREFIX}_FIND_VERSION_MIN)
AND ((${_PYTHON_PREFIX}_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE"
AND _${_PYTHON_PREFIX}_VERSION VERSION_LESS_EQUAL ${_PYTHON_PREFIX}_FIND_VERSION_MAX)
OR (${_PYTHON_PREFIX}_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE"
AND _${_PYTHON_PREFIX}_VERSION VERSION_LESS ${_PYTHON_PREFIX}_FIND_VERSION_MAX)))
list (APPEND _${_PYTHON_PREFIX}_FIND_VERSIONS ${_${_PYTHON_PREFIX}_VERSION})
endif()
endforeach()
else()
# plain version request: EXACT pins a single major.minor, otherwise every
# known version >= the requested one is considered compatible
if (${_PYTHON_PREFIX}_FIND_VERSION_COUNT GREATER 1)
if (${_PYTHON_PREFIX}_FIND_VERSION_EXACT)
set (_${_PYTHON_PREFIX}_FIND_VERSION_EXACT "EXACT")
set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${${_PYTHON_PREFIX}_FIND_VERSION_MAJOR}.${${_PYTHON_PREFIX}_FIND_VERSION_MINOR})
else()
unset (_${_PYTHON_PREFIX}_FIND_VERSIONS)
# add all compatible versions
foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_VERSIONS)
if (_${_PYTHON_PREFIX}_VERSION VERSION_GREATER_EQUAL "${${_PYTHON_PREFIX}_FIND_VERSION_MAJOR}.${${_PYTHON_PREFIX}_FIND_VERSION_MINOR}")
list (APPEND _${_PYTHON_PREFIX}_FIND_VERSIONS ${_${_PYTHON_PREFIX}_VERSION})
endif()
endforeach()
endif()
endif()
endif()
# Set ABIs to search
## default: search any ABI
if (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR VERSION_LESS "3")
# ABI not supported
unset (_${_PYTHON_PREFIX}_FIND_ABI)
set (_${_PYTHON_PREFIX}_ABIFLAGS "")
else()
unset (_${_PYTHON_PREFIX}_FIND_ABI)
unset (_${_PYTHON_PREFIX}_ABIFLAGS)
if (DEFINED ${_PYTHON_PREFIX}_FIND_ABI)
# normalization: accept common boolean spellings and map them to ON/OFF
string (TOUPPER "${${_PYTHON_PREFIX}_FIND_ABI}" _${_PYTHON_PREFIX}_FIND_ABI)
list (TRANSFORM _${_PYTHON_PREFIX}_FIND_ABI REPLACE "^(TRUE|Y(ES)?|1)$" "ON")
list (TRANSFORM _${_PYTHON_PREFIX}_FIND_ABI REPLACE "^(FALSE|N(O)?|0)$" "OFF")
# expected form is a 3-element list: <pydebug>;<pymalloc>;<unicode-width>
if (NOT _${_PYTHON_PREFIX}_FIND_ABI MATCHES "^(ON|OFF|ANY);(ON|OFF|ANY);(ON|OFF|ANY)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${${_PYTHON_PREFIX}_FIND_ABI}: invalid value for '${_PYTHON_PREFIX}_FIND_ABI'. Ignore it")
unset (_${_PYTHON_PREFIX}_FIND_ABI)
endif()
_python_get_abiflags (_${_PYTHON_PREFIX}_ABIFLAGS)
endif()
endif()
unset (${_PYTHON_PREFIX}_SOABI)
# Define lookup strategy
# CMP0094 NEW selects "LOCATION" (first match along the path wins);
# OLD keeps the historical "VERSION" strategy (highest version wins)
cmake_policy (GET CMP0094 _${_PYTHON_PREFIX}_LOOKUP_POLICY)
if (_${_PYTHON_PREFIX}_LOOKUP_POLICY STREQUAL "NEW")
set (_${_PYTHON_PREFIX}_FIND_STRATEGY "LOCATION")
else()
set (_${_PYTHON_PREFIX}_FIND_STRATEGY "VERSION")
endif()
# an explicit user setting overrides the policy-derived default
if (DEFINED ${_PYTHON_PREFIX}_FIND_STRATEGY)
if (NOT ${_PYTHON_PREFIX}_FIND_STRATEGY MATCHES "^(VERSION|LOCATION)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${${_PYTHON_PREFIX}_FIND_STRATEGY}: invalid value for '${_PYTHON_PREFIX}_FIND_STRATEGY'. 'VERSION' or 'LOCATION' expected.")
set (_${_PYTHON_PREFIX}_FIND_STRATEGY "VERSION")
else()
set (_${_PYTHON_PREFIX}_FIND_STRATEGY "${${_PYTHON_PREFIX}_FIND_STRATEGY}")
endif()
endif()
# Python and Anaconda distributions: define which architectures can be used
if (CMAKE_SIZEOF_VOID_P)
# In this case, search only for 64bit or 32bit
math (EXPR _${_PYTHON_PREFIX}_ARCH "${CMAKE_SIZEOF_VOID_P} * 8")
set (_${_PYTHON_PREFIX}_ARCH2 ${_${_PYTHON_PREFIX}_ARCH})
else()
# architecture unknown, search for both 64bit and 32bit
set (_${_PYTHON_PREFIX}_ARCH 64)
set (_${_PYTHON_PREFIX}_ARCH2 32)
endif()
# IronPython support
unset (_${_PYTHON_PREFIX}_IRON_PYTHON_INTERPRETER_NAMES)
unset (_${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_NAMES)
unset (_${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_ARCH_FLAGS)
if (CMAKE_SIZEOF_VOID_P)
if (_${_PYTHON_PREFIX}_ARCH EQUAL "32")
set (_${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_ARCH_FLAGS "/platform:x86")
else()
set (_${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_ARCH_FLAGS "/platform:x64")
endif()
endif()
if (NOT CMAKE_SYSTEM_NAME STREQUAL "Linux")
# Do not use wrapper script on Linux because it is buggy: -c interpreter option cannot be used
list (APPEND _${_PYTHON_PREFIX}_IRON_PYTHON_INTERPRETER_NAMES "ipy${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}" "ipy64" "ipy32" "ipy")
list (APPEND _${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_NAMES "ipyc")
endif()
# fall back on the .exe names (usable through the mono launcher on Linux)
list (APPEND _${_PYTHON_PREFIX}_IRON_PYTHON_INTERPRETER_NAMES "ipy.exe")
list (APPEND _${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_NAMES "ipyc.exe")
set (_${_PYTHON_PREFIX}_IRON_PYTHON_PATH_SUFFIXES net45 net40 bin)
# PyPy support
if (_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR EQUAL "3")
set (_${_PYTHON_PREFIX}_PYPY_NAMES pypy3)
set (_${_PYTHON_PREFIX}_PYPY_LIB_NAMES pypy3-c)
if (WIN32)
# special name for runtime part
list (APPEND _${_PYTHON_PREFIX}_PYPY_LIB_NAMES libpypy3-c)
endif()
set (_${_PYTHON_PREFIX}_PYPY_INCLUDE_PATH_SUFFIXES lib/pypy3)
else()
set (_${_PYTHON_PREFIX}_PYPY_NAMES pypy)
set (_${_PYTHON_PREFIX}_PYPY_LIB_NAMES pypy-c)
if (WIN32)
# special name for runtime part
list (APPEND _${_PYTHON_PREFIX}_PYPY_LIB_NAMES libpypy-c)
endif()
set (_${_PYTHON_PREFIX}_PYPY_INCLUDE_PATH_SUFFIXES lib/pypy)
endif()
set (_${_PYTHON_PREFIX}_PYPY_EXECUTABLE_PATH_SUFFIXES bin)
set (_${_PYTHON_PREFIX}_PYPY_LIBRARY_PATH_SUFFIXES lib libs bin)
list (APPEND _${_PYTHON_PREFIX}_PYPY_INCLUDE_PATH_SUFFIXES include)
# Python Implementations handling
# validate user-provided list; default is CPython (plus IronPython on Windows)
unset (_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
if (DEFINED ${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
foreach (_${_PYTHON_PREFIX}_IMPLEMENTATION IN LISTS ${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
if (NOT _${_PYTHON_PREFIX}_IMPLEMENTATION MATCHES "^(CPython|IronPython|PyPy)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${_${_PYTHON_PREFIX}_IMPLEMENTATION}: invalid value for '${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS'. 'CPython', 'IronPython' or 'PyPy' expected. Value will be ignored.")
else()
list (APPEND _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS ${_${_PYTHON_PREFIX}_IMPLEMENTATION})
endif()
endforeach()
else()
if (WIN32)
set (_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS CPython IronPython)
else()
set (_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS CPython)
endif()
endif()
# compute list of names for header file
# (IronPython has no C header of its own, so it contributes nothing here)
unset (_${_PYTHON_PREFIX}_INCLUDE_NAMES)
foreach (_${_PYTHON_PREFIX}_IMPLEMENTATION IN LISTS _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
if (_${_PYTHON_PREFIX}_IMPLEMENTATION STREQUAL "CPython")
list (APPEND _${_PYTHON_PREFIX}_INCLUDE_NAMES "Python.h")
elseif (_${_PYTHON_PREFIX}_IMPLEMENTATION STREQUAL "PyPy")
list (APPEND _${_PYTHON_PREFIX}_INCLUDE_NAMES "PyPy.h")
endif()
endforeach()
# Apple frameworks handling
# precedence: <prefix>_FIND_FRAMEWORK, then CMAKE_FIND_FRAMEWORK, else FIRST
_python_find_frameworks ()
set (_${_PYTHON_PREFIX}_FIND_FRAMEWORK "FIRST")
if (DEFINED ${_PYTHON_PREFIX}_FIND_FRAMEWORK)
if (NOT ${_PYTHON_PREFIX}_FIND_FRAMEWORK MATCHES "^(FIRST|LAST|NEVER)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${${_PYTHON_PREFIX}_FIND_FRAMEWORK}: invalid value for '${_PYTHON_PREFIX}_FIND_FRAMEWORK'. 'FIRST', 'LAST' or 'NEVER' expected. 'FIRST' will be used instead.")
else()
set (_${_PYTHON_PREFIX}_FIND_FRAMEWORK ${${_PYTHON_PREFIX}_FIND_FRAMEWORK})
endif()
elseif (DEFINED CMAKE_FIND_FRAMEWORK)
if (CMAKE_FIND_FRAMEWORK STREQUAL "ONLY")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: CMAKE_FIND_FRAMEWORK: 'ONLY' value is not supported. 'FIRST' will be used instead.")
elseif (NOT CMAKE_FIND_FRAMEWORK MATCHES "^(FIRST|LAST|NEVER)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${CMAKE_FIND_FRAMEWORK}: invalid value for 'CMAKE_FIND_FRAMEWORK'. 'FIRST', 'LAST' or 'NEVER' expected. 'FIRST' will be used instead.")
else()
set (_${_PYTHON_PREFIX}_FIND_FRAMEWORK ${CMAKE_FIND_FRAMEWORK})
endif()
endif()
# Save CMAKE_FIND_APPBUNDLE
# (the saved values are presumably restored later in the module — the
# restore is outside this chunk)
if (DEFINED CMAKE_FIND_APPBUNDLE)
set (_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE ${CMAKE_FIND_APPBUNDLE})
else()
unset (_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE)
endif()
# To avoid app bundle lookup
set (CMAKE_FIND_APPBUNDLE "NEVER")
# Save CMAKE_FIND_FRAMEWORK
if (DEFINED CMAKE_FIND_FRAMEWORK)
set (_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK ${CMAKE_FIND_FRAMEWORK})
else()
unset (_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK)
endif()
# To avoid framework lookup
# (framework paths are injected explicitly where wanted, see the searches below)
set (CMAKE_FIND_FRAMEWORK "NEVER")
# Windows Registry handling
if (DEFINED ${_PYTHON_PREFIX}_FIND_REGISTRY)
if (NOT ${_PYTHON_PREFIX}_FIND_REGISTRY MATCHES "^(FIRST|LAST|NEVER)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${${_PYTHON_PREFIX}_FIND_REGISTRY}: invalid value for '${_PYTHON_PREFIX}_FIND_REGISTRY'. 'FIRST', 'LAST' or 'NEVER' expected. 'FIRST' will be used instead.")
set (_${_PYTHON_PREFIX}_FIND_REGISTRY "FIRST")
else()
set (_${_PYTHON_PREFIX}_FIND_REGISTRY ${${_PYTHON_PREFIX}_FIND_REGISTRY})
endif()
else()
set (_${_PYTHON_PREFIX}_FIND_REGISTRY "FIRST")
endif()
# virtual environments recognition
# an active venv/conda environment enables FIRST/ONLY handling; otherwise
# the search is STANDARD regardless of the user's setting
if (DEFINED ENV{VIRTUAL_ENV} OR DEFINED ENV{CONDA_PREFIX})
if (DEFINED ${_PYTHON_PREFIX}_FIND_VIRTUALENV)
if (NOT ${_PYTHON_PREFIX}_FIND_VIRTUALENV MATCHES "^(FIRST|ONLY|STANDARD)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${${_PYTHON_PREFIX}_FIND_VIRTUALENV}: invalid value for '${_PYTHON_PREFIX}_FIND_VIRTUALENV'. 'FIRST', 'ONLY' or 'STANDARD' expected. 'FIRST' will be used instead.")
set (_${_PYTHON_PREFIX}_FIND_VIRTUALENV "FIRST")
else()
set (_${_PYTHON_PREFIX}_FIND_VIRTUALENV ${${_PYTHON_PREFIX}_FIND_VIRTUALENV})
endif()
else()
set (_${_PYTHON_PREFIX}_FIND_VIRTUALENV FIRST)
endif()
else()
set (_${_PYTHON_PREFIX}_FIND_VIRTUALENV STANDARD)
endif()
# Python naming handling
if (DEFINED ${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES)
if (NOT ${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES MATCHES "^(FIRST|LAST|NEVER)$")
message (AUTHOR_WARNING "Find${_PYTHON_PREFIX}: ${_${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES}: invalid value for '${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES'. 'FIRST', 'LAST' or 'NEVER' expected. 'LAST' will be used instead.")
set (_${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES LAST)
else()
set (_${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES ${${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES})
endif()
else()
set (_${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES LAST)
endif()
# Compute search signature
# This signature will be used to check validity of cached variables on new search
# NOTE(review): there is no ':' separator between the VIRTUALENV and
# UNVERSIONED_NAMES fields below, unlike the other fields — looks like an
# omission, but only affects hash uniqueness in unlikely collisions; verify
# against upstream FindPython before changing.
set (_${_PYTHON_PREFIX}_SIGNATURE "${${_PYTHON_PREFIX}_ROOT_DIR}:${_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS}:${_${_PYTHON_PREFIX}_FIND_STRATEGY}:${${_PYTHON_PREFIX}_FIND_VIRTUALENV}${_${_PYTHON_PREFIX}_FIND_UNVERSIONED_NAMES}")
if (NOT WIN32)
string (APPEND _${_PYTHON_PREFIX}_SIGNATURE ":${${_PYTHON_PREFIX}_USE_STATIC_LIBS}:")
endif()
if (CMAKE_HOST_APPLE)
string (APPEND _${_PYTHON_PREFIX}_SIGNATURE ":${_${_PYTHON_PREFIX}_FIND_FRAMEWORK}")
endif()
if (CMAKE_HOST_WIN32)
string (APPEND _${_PYTHON_PREFIX}_SIGNATURE ":${_${_PYTHON_PREFIX}_FIND_REGISTRY}")
endif()
# Validate the cached Development.<module> artifacts against the current
# search signature.  If the fingerprint matches, re-validate the cached
# library/include dir against the requested version constraints; otherwise
# drop the stale cache entries so the search runs afresh.
function (_PYTHON_CHECK_DEVELOPMENT_SIGNATURE module)
if ("Development.${module}" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
string (TOUPPER "${module}" id)
# rebuild the fingerprint the same way _PYTHON_COMPUTE_DEVELOPMENT_SIGNATURE does
set (signature "${_${_PYTHON_PREFIX}_SIGNATURE}:")
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
list (APPEND signature "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}:")
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
list (APPEND signature "${_${_PYTHON_PREFIX}_INCLUDE_DIR}:")
endif()
string (MD5 signature "${signature}")
if (signature STREQUAL _${_PYTHON_PREFIX}_DEVELOPMENT_${id}_SIGNATURE)
# signature still valid: re-check the cached artifacts against the
# requested version/range (CHECK_EXISTS also verifies the files exist)
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
if (${_PYTHON_PREFIX}_FIND_VERSION_EXACT)
_python_validate_library (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} EXACT CHECK_EXISTS)
elseif (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
_python_validate_library (IN_RANGE CHECK_EXISTS)
elseif (DEFINED ${_PYTHON_PREFIX}_FIND_VERSION)
_python_validate_library (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} CHECK_EXISTS)
else()
_python_validate_library (CHECK_EXISTS)
endif()
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
if (${_PYTHON_PREFIX}_FIND_VERSION_EXACT)
_python_validate_include_dir (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} EXACT CHECK_EXISTS)
elseif (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
_python_validate_include_dir (IN_RANGE CHECK_EXISTS)
# NOTE(review): the library branch above uses 'DEFINED ... FIND_VERSION'
# here — the missing DEFINED looks inconsistent, though version strings
# are always truthy so behavior should be the same; confirm upstream.
elseif (${_PYTHON_PREFIX}_FIND_VERSION)
_python_validate_include_dir (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} CHECK_EXISTS)
else()
_python_validate_include_dir (CHECK_EXISTS)
endif()
endif()
else()
# stale fingerprint: forget the cached artifacts entirely
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
unset (_${_PYTHON_PREFIX}_LIBRARY_RELEASE CACHE)
unset (_${_PYTHON_PREFIX}_LIBRARY_DEBUG CACHE)
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
unset (_${_PYTHON_PREFIX}_INCLUDE_DIR CACHE)
endif()
endif()
# if any required artifact is now missing, invalidate the derived config
# and the fingerprint so everything is recomputed
if (("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS
AND NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
OR ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS
AND NOT _${_PYTHON_PREFIX}_INCLUDE_DIR))
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
unset (_${_PYTHON_PREFIX}_DEVELOPMENT_${id}_SIGNATURE CACHE)
endif()
endif()
endfunction()
# Cache an MD5 fingerprint of the artifacts found for Development.<module>,
# so a later run can detect stale cached results (the counterpart check is
# done by _PYTHON_CHECK_DEVELOPMENT_SIGNATURE).
function (_PYTHON_COMPUTE_DEVELOPMENT_SIGNATURE module)
  string (TOUPPER "${module}" id)
  if (NOT ${_PYTHON_PREFIX}_Development.${module}_FOUND)
    # component not found: drop any fingerprint left over from a previous run
    unset (_${_PYTHON_PREFIX}_DEVELOPMENT_${id}_SIGNATURE CACHE)
    return()
  endif()
  # fingerprint covers the global search signature plus each relevant artifact
  set (_PYTHON_CDS_RAW "${_${_PYTHON_PREFIX}_SIGNATURE}:")
  if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
    list (APPEND _PYTHON_CDS_RAW "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}:")
  endif()
  if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_${id}_ARTIFACTS)
    list (APPEND _PYTHON_CDS_RAW "${_${_PYTHON_PREFIX}_INCLUDE_DIR}:")
  endif()
  string (MD5 _PYTHON_CDS_HASH "${_PYTHON_CDS_RAW}")
  set (_${_PYTHON_PREFIX}_DEVELOPMENT_${id}_SIGNATURE "${_PYTHON_CDS_HASH}" CACHE INTERNAL "")
endfunction()
# Reset per-run bookkeeping before the component searches below start
# accumulating required/cached variables and failure reasons.
unset (_${_PYTHON_PREFIX}_REQUIRED_VARS)
unset (_${_PYTHON_PREFIX}_CACHED_VARS)
unset (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE)
unset (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE)
unset (_${_PYTHON_PREFIX}_Development_REASON_FAILURE)
unset (_${_PYTHON_PREFIX}_NumPy_REASON_FAILURE)
# preamble
## For IronPython on platforms other than Windows, search for the .Net interpreter
if ("IronPython" IN_LIST _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS
AND NOT WIN32)
find_program (${_PYTHON_PREFIX}_DOTNET_LAUNCHER
NAMES "mono")
endif()
# first step, search for the interpreter
if ("Interpreter" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS _${_PYTHON_PREFIX}_EXECUTABLE
_${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES)
if (${_PYTHON_PREFIX}_FIND_REQUIRED_Interpreter)
list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_EXECUTABLE)
endif()
# a user-supplied absolute path takes precedence over any cached/search result
if (DEFINED ${_PYTHON_PREFIX}_EXECUTABLE
AND IS_ABSOLUTE "${${_PYTHON_PREFIX}_EXECUTABLE}")
if (NOT ${_PYTHON_PREFIX}_EXECUTABLE STREQUAL _${_PYTHON_PREFIX}_EXECUTABLE)
# invalidate cache properties
unset (_${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES CACHE)
endif()
set (_${_PYTHON_PREFIX}_EXECUTABLE "${${_PYTHON_PREFIX}_EXECUTABLE}" CACHE INTERNAL "")
elseif (DEFINED _${_PYTHON_PREFIX}_EXECUTABLE)
# compute interpreter signature and check validity of definition
string (MD5 __${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE "${_${_PYTHON_PREFIX}_SIGNATURE}:${_${_PYTHON_PREFIX}_EXECUTABLE}")
if (__${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE STREQUAL _${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE)
# check version validity
if (${_PYTHON_PREFIX}_FIND_VERSION_EXACT)
_python_validate_interpreter (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} EXACT CHECK_EXISTS)
elseif (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
_python_validate_interpreter (IN_RANGE CHECK_EXISTS)
elseif (DEFINED ${_PYTHON_PREFIX}_FIND_VERSION)
_python_validate_interpreter (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} CHECK_EXISTS)
else()
_python_validate_interpreter (CHECK_EXISTS)
endif()
else()
# search parameters changed since the interpreter was cached: discard it
unset (_${_PYTHON_PREFIX}_EXECUTABLE CACHE)
endif()
if (NOT _${_PYTHON_PREFIX}_EXECUTABLE)
unset (_${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE CACHE)
unset (_${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES CACHE)
endif()
endif()
if (NOT _${_PYTHON_PREFIX}_EXECUTABLE)
set (_${_PYTHON_PREFIX}_HINTS "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
if (_${_PYTHON_PREFIX}_FIND_STRATEGY STREQUAL "LOCATION")
# build all executable names
_python_get_names (_${_PYTHON_PREFIX}_NAMES VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS} POSIX INTERPRETER)
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS} INTERPRETER)
# Framework Paths
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS})
# Registry Paths
_python_get_registries (_${_PYTHON_PREFIX}_REGISTRY_PATHS VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS})
# validation options passed to every _python_validate_interpreter call below
set (_${_PYTHON_PREFIX}_VALIDATE_OPTIONS ${_${_PYTHON_PREFIX}_FIND_VERSION_EXACT})
if (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
list (APPEND _${_PYTHON_PREFIX}_VALIDATE_OPTIONS IN_RANGE)
elseif (DEFINED ${_PYTHON_PREFIX}_FIND_VERSION)
list (APPEND _${_PYTHON_PREFIX}_VALIDATE_OPTIONS VERSION ${${_PYTHON_PREFIX}_FIND_VERSION})
endif()
# "LOCATION" strategy: try each location class in priority order (virtual
# env, Apple frameworks, Windows registry, HINTS, standard paths, then the
# LAST framework/registry fallbacks) and keep the first executable that
# validates.  The while(TRUE)/break() construct is an early-exit sequence,
# not a real loop.
#
# BUGFIX: every validation call below now passes the internal
# ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS} (leading underscore), set just
# above this loop.  Six calls previously dereferenced the undefined
# user-facing ${${_PYTHON_PREFIX}_VALIDATE_OPTIONS}, silently dropping the
# EXACT / IN_RANGE / VERSION constraints for those look-ups (the
# virtual-env call and the whole "VERSION" strategy branch already used the
# correct variable).
while (TRUE)
  # Virtual environments handling
  if (_${_PYTHON_PREFIX}_FIND_VIRTUALENV MATCHES "^(FIRST|ONLY)$")
    find_program (_${_PYTHON_PREFIX}_EXECUTABLE
                  NAMES ${_${_PYTHON_PREFIX}_NAMES}
                  NAMES_PER_DIR
                  HINTS ${_${_PYTHON_PREFIX}_HINTS}
                  PATHS ENV VIRTUAL_ENV ENV CONDA_PREFIX
                  PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
                  NO_CMAKE_PATH
                  NO_CMAKE_ENVIRONMENT_PATH
                  NO_SYSTEM_ENVIRONMENT_PATH
                  NO_CMAKE_SYSTEM_PATH)
    _python_validate_interpreter (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
    if (_${_PYTHON_PREFIX}_EXECUTABLE)
      break()
    endif()
    # in ONLY mode nothing outside the virtual environment may be used
    if (_${_PYTHON_PREFIX}_FIND_VIRTUALENV STREQUAL "ONLY")
      break()
    endif()
  endif()
  # Apple frameworks handling
  if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
    find_program (_${_PYTHON_PREFIX}_EXECUTABLE
                  NAMES ${_${_PYTHON_PREFIX}_NAMES}
                  NAMES_PER_DIR
                  HINTS ${_${_PYTHON_PREFIX}_HINTS}
                  PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
                  PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
                  NO_CMAKE_PATH
                  NO_CMAKE_ENVIRONMENT_PATH
                  NO_SYSTEM_ENVIRONMENT_PATH
                  NO_CMAKE_SYSTEM_PATH)
    _python_validate_interpreter (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
    if (_${_PYTHON_PREFIX}_EXECUTABLE)
      break()
    endif()
  endif()
  # Windows registry
  if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
    find_program (_${_PYTHON_PREFIX}_EXECUTABLE
                  NAMES ${_${_PYTHON_PREFIX}_NAMES}
                  NAMES_PER_DIR
                  HINTS ${_${_PYTHON_PREFIX}_HINTS}
                  PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
                  PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
                  NO_SYSTEM_ENVIRONMENT_PATH
                  NO_CMAKE_SYSTEM_PATH)
    _python_validate_interpreter (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
    if (_${_PYTHON_PREFIX}_EXECUTABLE)
      break()
    endif()
  endif()
  # try using HINTS
  find_program (_${_PYTHON_PREFIX}_EXECUTABLE
                NAMES ${_${_PYTHON_PREFIX}_NAMES}
                NAMES_PER_DIR
                HINTS ${_${_PYTHON_PREFIX}_HINTS}
                PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
                NO_SYSTEM_ENVIRONMENT_PATH
                NO_CMAKE_SYSTEM_PATH)
  _python_validate_interpreter (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
  if (_${_PYTHON_PREFIX}_EXECUTABLE)
    break()
  endif()
  # try using standard paths
  find_program (_${_PYTHON_PREFIX}_EXECUTABLE
                NAMES ${_${_PYTHON_PREFIX}_NAMES}
                NAMES_PER_DIR
                PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES})
  _python_validate_interpreter (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
  if (_${_PYTHON_PREFIX}_EXECUTABLE)
    break()
  endif()
  # Apple frameworks handling
  if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
    find_program (_${_PYTHON_PREFIX}_EXECUTABLE
                  NAMES ${_${_PYTHON_PREFIX}_NAMES}
                  NAMES_PER_DIR
                  PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
                  PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
                  NO_DEFAULT_PATH)
    _python_validate_interpreter (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
    if (_${_PYTHON_PREFIX}_EXECUTABLE)
      break()
    endif()
  endif()
  # Windows registry
  if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
    find_program (_${_PYTHON_PREFIX}_EXECUTABLE
                  NAMES ${_${_PYTHON_PREFIX}_NAMES}
                  NAMES_PER_DIR
                  PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
                  PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
                  NO_DEFAULT_PATH)
    _python_validate_interpreter (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
    if (_${_PYTHON_PREFIX}_EXECUTABLE)
      break()
    endif()
  endif()
  break()
endwhile()
else()
# "VERSION" strategy: iterate over the candidate versions and, for each one,
# walk the same prioritized location classes as the LOCATION branch above.
# look-up for various versions and locations
set (_${_PYTHON_PREFIX}_VALIDATE_OPTIONS EXACT)
if (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
list (APPEND _${_PYTHON_PREFIX}_VALIDATE_OPTIONS IN_RANGE)
endif()
foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
_python_get_names (_${_PYTHON_PREFIX}_NAMES VERSION ${_${_PYTHON_PREFIX}_VERSION} POSIX INTERPRETER)
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES VERSION ${_${_PYTHON_PREFIX}_VERSION} INTERPRETER)
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS VERSION ${_${_PYTHON_PREFIX}_VERSION})
_python_get_registries (_${_PYTHON_PREFIX}_REGISTRY_PATHS VERSION ${_${_PYTHON_PREFIX}_VERSION})
# Virtual environments handling
if (_${_PYTHON_PREFIX}_FIND_VIRTUALENV MATCHES "^(FIRST|ONLY)$")
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ENV VIRTUAL_ENV ENV CONDA_PREFIX
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_interpreter (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_EXECUTABLE)
break()
endif()
# ONLY: skip the remaining locations but still try the other versions
if (_${_PYTHON_PREFIX}_FIND_VIRTUALENV STREQUAL "ONLY")
continue()
endif()
endif()
# Apple frameworks handling
# (validation of this and the registry attempt is shared, below)
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
# Windows registry
if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
_python_validate_interpreter (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_EXECUTABLE)
break()
endif()
# try using HINTS
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_interpreter (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_EXECUTABLE)
break()
endif()
# try using standard paths.
# NAMES_PER_DIR is not defined on purpose to have a chance to find
# expected version.
# For example, typical systems have 'python' for version 2.* and 'python3'
# for version 3.*. So looking for names per dir will find, potentially,
# systematically 'python' (i.e. version 2) even if version 3 is searched.
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES})
_python_validate_interpreter (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_EXECUTABLE)
break()
endif()
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_DEFAULT_PATH)
endif()
# Windows registry
if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_DEFAULT_PATH)
endif()
_python_validate_interpreter (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_EXECUTABLE)
break()
endif()
endforeach()
if (NOT _${_PYTHON_PREFIX}_EXECUTABLE AND
NOT _${_PYTHON_PREFIX}_FIND_VIRTUALENV STREQUAL "ONLY")
# No specific version found. Retry with generic names and standard paths.
# NAMES_PER_DIR is not defined on purpose to have a chance to find
# expected version.
# For example, typical systems have 'python' for version 2.* and 'python3'
# for version 3.*. So looking for names per dir will find, potentially,
# systematically 'python' (i.e. version 2) even if version 3 is searched.
_python_get_names (_${_PYTHON_PREFIX}_NAMES POSIX INTERPRETER)
find_program (_${_PYTHON_PREFIX}_EXECUTABLE
NAMES ${_${_PYTHON_PREFIX}_NAMES})
_python_validate_interpreter ()
endif()
endif()
endif()
# publish the result and probe the found interpreter for its exact version
set (${_PYTHON_PREFIX}_EXECUTABLE "${_${_PYTHON_PREFIX}_EXECUTABLE}")
# launcher needed to run non-native interpreters (e.g. 'mono' for IronPython)
_python_get_launcher (_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER INTERPRETER)
# retrieve exact version of executable found
if (_${_PYTHON_PREFIX}_EXECUTABLE)
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:3]]))"
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE ${_PYTHON_PREFIX}_VERSION
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (NOT _${_PYTHON_PREFIX}_RESULT)
set (_${_PYTHON_PREFIX}_EXECUTABLE_USABLE TRUE)
else()
# Interpreter is not usable
set (_${_PYTHON_PREFIX}_EXECUTABLE_USABLE FALSE)
unset (${_PYTHON_PREFIX}_VERSION)
set (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE "Cannot run the interpreter \"${_${_PYTHON_PREFIX}_EXECUTABLE}\"")
endif()
endif()
if (_${_PYTHON_PREFIX}_EXECUTABLE AND _${_PYTHON_PREFIX}_EXECUTABLE_USABLE)
if (_${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES)
# properties cached from a previous run: unpack them positionally
set (${_PYTHON_PREFIX}_Interpreter_FOUND TRUE)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 0 ${_PYTHON_PREFIX}_INTERPRETER_ID)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 1 ${_PYTHON_PREFIX}_VERSION_MAJOR)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 2 ${_PYTHON_PREFIX}_VERSION_MINOR)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 3 ${_PYTHON_PREFIX}_VERSION_PATCH)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 4 _${_PYTHON_PREFIX}_ARCH)
set (_${_PYTHON_PREFIX}_ARCH2 ${_${_PYTHON_PREFIX}_ARCH})
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 5 _${_PYTHON_PREFIX}_ABIFLAGS)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 6 ${_PYTHON_PREFIX}_SOABI)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 7 ${_PYTHON_PREFIX}_STDLIB)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 8 ${_PYTHON_PREFIX}_STDARCH)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 9 ${_PYTHON_PREFIX}_SITELIB)
list (GET _${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES 10 ${_PYTHON_PREFIX}_SITEARCH)
else()
# no cached properties: split the just-probed dotted version string
string (REGEX MATCHALL "[0-9]+" _${_PYTHON_PREFIX}_VERSIONS "${${_PYTHON_PREFIX}_VERSION}")
list (GET _${_PYTHON_PREFIX}_VERSIONS 0 ${_PYTHON_PREFIX}_VERSION_MAJOR)
list (GET _${_PYTHON_PREFIX}_VERSIONS 1 ${_PYTHON_PREFIX}_VERSION_MINOR)
list (GET _${_PYTHON_PREFIX}_VERSIONS 2 ${_PYTHON_PREFIX}_VERSION_PATCH)
# The interpreter only counts as found when its major version matches the
# module's internal major version; in that case pin the minor version and
# query the ABI flags for all subsequent artifact searches.
if (${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
  set (${_PYTHON_PREFIX}_Interpreter_FOUND TRUE)
  # Use interpreter version and ABI for future searches to ensure consistency
  set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR})
  # BUGFIX: the launcher variable was misspelled here (INTERPRETR), so the
  # launcher set by _python_get_launcher above (e.g. 'mono' for IronPython)
  # was silently dropped from the command line.
  execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
                           "import sys; sys.stdout.write(sys.abiflags)"
                   RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
                   OUTPUT_VARIABLE _${_PYTHON_PREFIX}_ABIFLAGS
                   ERROR_QUIET
                   OUTPUT_STRIP_TRAILING_WHITESPACE)
  if (_${_PYTHON_PREFIX}_RESULT)
    # assume ABI is not supported
    set (_${_PYTHON_PREFIX}_ABIFLAGS "")
  endif()
endif()
if (${_PYTHON_PREFIX}_Interpreter_FOUND)
unset (_${_PYTHON_PREFIX}_Interpreter_REASON_FAILURE)
# compute and save interpreter signature
string (MD5 __${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE "${_${_PYTHON_PREFIX}_SIGNATURE}:${_${_PYTHON_PREFIX}_EXECUTABLE}")
set (_${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE "${__${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE}" CACHE INTERNAL "")
if (NOT CMAKE_SIZEOF_VOID_P)
# determine interpreter architecture
# (only needed when the project itself did not define one, e.g. no
# enabled languages)
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys; sys.stdout.write(str(sys.maxsize > 2**32))"
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE ${_PYTHON_PREFIX}_IS64BIT
ERROR_VARIABLE ${_PYTHON_PREFIX}_IS64BIT)
if (NOT _${_PYTHON_PREFIX}_RESULT)
if (${_PYTHON_PREFIX}_IS64BIT)
set (_${_PYTHON_PREFIX}_ARCH 64)
set (_${_PYTHON_PREFIX}_ARCH2 64)
else()
set (_${_PYTHON_PREFIX}_ARCH 32)
set (_${_PYTHON_PREFIX}_ARCH2 32)
endif()
endif()
endif()
# retrieve interpreter identity
# 'python -V' output distinguishes distributions (Anaconda, Canopy, PyPy, ...)
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -V
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE ${_PYTHON_PREFIX}_INTERPRETER_ID
ERROR_VARIABLE ${_PYTHON_PREFIX}_INTERPRETER_ID)
if (NOT _${_PYTHON_PREFIX}_RESULT)
if (${_PYTHON_PREFIX}_INTERPRETER_ID MATCHES "Anaconda")
set (${_PYTHON_PREFIX}_INTERPRETER_ID "Anaconda")
elseif (${_PYTHON_PREFIX}_INTERPRETER_ID MATCHES "Enthought")
set (${_PYTHON_PREFIX}_INTERPRETER_ID "Canopy")
elseif (${_PYTHON_PREFIX}_INTERPRETER_ID MATCHES "PyPy ([0-9.]+)")
set (${_PYTHON_PREFIX}_INTERPRETER_ID "PyPy")
set (${_PYTHON_PREFIX}_PyPy_VERSION "${CMAKE_MATCH_1}")
else()
# fall back on the first word of the -V output (normally "Python")
string (REGEX REPLACE "^([^ ]+).*" "\\1" ${_PYTHON_PREFIX}_INTERPRETER_ID "${${_PYTHON_PREFIX}_INTERPRETER_ID}")
if (${_PYTHON_PREFIX}_INTERPRETER_ID STREQUAL "Python")
# try to get a more precise ID
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys; sys.stdout.write(sys.copyright)"
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE ${_PYTHON_PREFIX}_COPYRIGHT
ERROR_QUIET)
if (${_PYTHON_PREFIX}_COPYRIGHT MATCHES "ActiveState")
set (${_PYTHON_PREFIX}_INTERPRETER_ID "ActivePython")
endif()
endif()
endif()
else()
# version query failed: keep a generic identity
set (${_PYTHON_PREFIX}_INTERPRETER_ID Python)
endif()
# retrieve various package installation directories
execute_process (COMMAND ${_${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys\ntry:\n from distutils import sysconfig\n sys.stdout.write(';'.join([sysconfig.get_python_lib(plat_specific=False,standard_lib=True),sysconfig.get_python_lib(plat_specific=True,standard_lib=True),sysconfig.get_python_lib(plat_specific=False,standard_lib=False),sysconfig.get_python_lib(plat_specific=True,standard_lib=False)]))\nexcept Exception:\n import sysconfig\n sys.stdout.write(';'.join([sysconfig.get_path('stdlib'),sysconfig.get_path('platstdlib'),sysconfig.get_path('purelib'),sysconfig.get_path('platlib')]))"
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE _${_PYTHON_PREFIX}_LIBPATHS
ERROR_QUIET)
if (NOT _${_PYTHON_PREFIX}_RESULT)
list (GET _${_PYTHON_PREFIX}_LIBPATHS 0 ${_PYTHON_PREFIX}_STDLIB)
list (GET _${_PYTHON_PREFIX}_LIBPATHS 1 ${_PYTHON_PREFIX}_STDARCH)
list (GET _${_PYTHON_PREFIX}_LIBPATHS 2 ${_PYTHON_PREFIX}_SITELIB)
list (GET _${_PYTHON_PREFIX}_LIBPATHS 3 ${_PYTHON_PREFIX}_SITEARCH)
else()
unset (${_PYTHON_PREFIX}_STDLIB)
unset (${_PYTHON_PREFIX}_STDARCH)
unset (${_PYTHON_PREFIX}_SITELIB)
unset (${_PYTHON_PREFIX}_SITEARCH)
endif()
_python_get_config_var (${_PYTHON_PREFIX}_SOABI SOABI)
# store properties in the cache to speed-up future searches
set (_${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES
"${${_PYTHON_PREFIX}_INTERPRETER_ID};${${_PYTHON_PREFIX}_VERSION_MAJOR};${${_PYTHON_PREFIX}_VERSION_MINOR};${${_PYTHON_PREFIX}_VERSION_PATCH};${_${_PYTHON_PREFIX}_ARCH};${_${_PYTHON_PREFIX}_ABIFLAGS};${${_PYTHON_PREFIX}_SOABI};${${_PYTHON_PREFIX}_STDLIB};${${_PYTHON_PREFIX}_STDARCH};${${_PYTHON_PREFIX}_SITELIB};${${_PYTHON_PREFIX}_SITEARCH}" CACHE INTERNAL "${_PYTHON_PREFIX} Properties")
else()
unset (_${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE CACHE)
unset (${_PYTHON_PREFIX}_INTERPRETER_ID)
endif()
endif()
endif()
if (${_PYTHON_PREFIX}_ARTIFACTS_INTERACTIVE)
set (${_PYTHON_PREFIX}_EXECUTABLE "${_${_PYTHON_PREFIX}_EXECUTABLE}" CACHE FILEPATH "${_PYTHON_PREFIX} Interpreter")
endif()
_python_mark_as_internal (_${_PYTHON_PREFIX}_EXECUTABLE
_${_PYTHON_PREFIX}_INTERPRETER_PROPERTIES
_${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE)
endif()
# second step, search for compiler (IronPython)
if ("Compiler" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS _${_PYTHON_PREFIX}_COMPILER)
if (${_PYTHON_PREFIX}_FIND_REQUIRED_Compiler)
list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_COMPILER)
endif()
# Validate any pre-existing compiler definition (user-supplied or cached)
# before reusing it; drop it when IronPython is not a requested
# implementation or when the cached signature no longer matches.
if (NOT "IronPython" IN_LIST _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
unset (_${_PYTHON_PREFIX}_COMPILER CACHE)
unset (_${_PYTHON_PREFIX}_COMPILER_SIGNATURE CACHE)
elseif (DEFINED ${_PYTHON_PREFIX}_COMPILER
AND IS_ABSOLUTE "${${_PYTHON_PREFIX}_COMPILER}")
set (_${_PYTHON_PREFIX}_COMPILER "${${_PYTHON_PREFIX}_COMPILER}" CACHE INTERNAL "")
elseif (DEFINED _${_PYTHON_PREFIX}_COMPILER)
# compute compiler signature and check validity of definition
string (MD5 __${_PYTHON_PREFIX}_COMPILER_SIGNATURE "${_${_PYTHON_PREFIX}_SIGNATURE}:${_${_PYTHON_PREFIX}_COMPILER}")
if (__${_PYTHON_PREFIX}_COMPILER_SIGNATURE STREQUAL _${_PYTHON_PREFIX}_COMPILER_SIGNATURE)
# check version validity
if (${_PYTHON_PREFIX}_FIND_VERSION_EXACT)
_python_validate_compiler (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} EXACT CHECK_EXISTS)
elseif (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
_python_validate_compiler (IN_RANGE CHECK_EXISTS)
elseif (DEFINED ${_PYTHON_PREFIX}_FIND_VERSION)
_python_validate_compiler (VERSION ${${_PYTHON_PREFIX}_FIND_VERSION} CHECK_EXISTS)
else()
_python_validate_compiler (CHECK_EXISTS)
endif()
else()
unset (_${_PYTHON_PREFIX}_COMPILER CACHE)
unset (_${_PYTHON_PREFIX}_COMPILER_SIGNATURE CACHE)
endif()
endif()
if ("IronPython" IN_LIST _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS
AND NOT _${_PYTHON_PREFIX}_COMPILER)
# IronPython specific artifacts
# If IronPython interpreter is found, use its path
unset (_${_PYTHON_PREFIX}_IRON_ROOT)
if (${_PYTHON_PREFIX}_Interpreter_FOUND AND ${_PYTHON_PREFIX}_INTERPRETER_ID STREQUAL "IronPython")
get_filename_component (_${_PYTHON_PREFIX}_IRON_ROOT "${${_PYTHON_PREFIX}_EXECUTABLE}" DIRECTORY)
endif()
if (_${_PYTHON_PREFIX}_FIND_STRATEGY STREQUAL "LOCATION")
# LOCATION strategy: compute candidate names/paths once for all
# requested versions and search by location preference below.
_python_get_names (_${_PYTHON_PREFIX}_COMPILER_NAMES
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS}
COMPILER)
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS}
COMPILER)
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS})
_python_get_registries (_${_PYTHON_PREFIX}_REGISTRY_PATHS
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS})
set (_${_PYTHON_PREFIX}_VALIDATE_OPTIONS ${_${_PYTHON_PREFIX}_FIND_VERSION_EXACT})
if (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
list (APPEND _${_PYTHON_PREFIX}_VALIDATE_OPTIONS IN_RANGE)
elseif (DEFINED ${_PYTHON_PREFIX}_FIND_VERSION)
list (APPEND _${_PYTHON_PREFIX}_VALIDATE_OPTIONS VERSION ${${_PYTHON_PREFIX}_FIND_VERSION})
endif()
# Single-pass while(TRUE) used purely for early exit: break out as soon as a
# candidate compiler is found and passes validation. Search order follows the
# framework/registry preference: FIRST locations, HINTS, standard paths, then
# LAST locations.
while (TRUE)
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_compiler (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
# Windows registry
if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_compiler (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
# try using HINTS
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_compiler (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
# try using standard paths
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES})
_python_validate_compiler (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_DEFAULT_PATH)
_python_validate_compiler (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
# Windows registry
if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_DEFAULT_PATH)
_python_validate_compiler (${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
break()
endwhile()
else()
# try using root dir and registry
# VERSION strategy: each candidate version is validated EXACTly (plus the
# range constraint when one was requested).
set (_${_PYTHON_PREFIX}_VALIDATE_OPTIONS EXACT)
if (${_PYTHON_PREFIX}_FIND_VERSION_RANGE)
list (APPEND _${_PYTHON_PREFIX}_VALIDATE_OPTIONS IN_RANGE)
endif()
# Per-version compiler search (VERSION strategy): iterate the requested
# versions from most to least preferred and stop at the first validated hit.
# FIX: the name/suffix computations below previously passed
# ${_${_PYTHON_PREFIX}_FIND_VERSIONS} (the whole list) and the undefined
# ${_${_PYTHON_PREFIX}_FIND_VERSION} instead of the loop variable, so the
# per-version iteration never actually narrowed the candidate names. All
# four helpers now consistently use the loop's current version, matching the
# frameworks/registries calls and the VERSION passed to the validator.
foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
_python_get_names (_${_PYTHON_PREFIX}_COMPILER_NAMES
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_VERSION}
COMPILER)
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_VERSION}
COMPILER)
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_VERSION})
_python_get_registries (_${_PYTHON_PREFIX}_REGISTRY_PATHS
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_VERSION})
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_compiler (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
# Windows registry
if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_compiler (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
# try using HINTS
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
_python_validate_compiler (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_DEFAULT_PATH)
_python_validate_compiler (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
# Windows registry
if (CMAKE_HOST_WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_DEFAULT_PATH)
_python_validate_compiler (VERSION ${_${_PYTHON_PREFIX}_VERSION} ${_${_PYTHON_PREFIX}_VALIDATE_OPTIONS})
if (_${_PYTHON_PREFIX}_COMPILER)
break()
endif()
endif()
endforeach()
# no specific version found, re-try in standard paths
# (names/suffixes recomputed for the full requested version list, validation
# with no version constraint)
_python_get_names (_${_PYTHON_PREFIX}_COMPILER_NAMES
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS}
COMPILER)
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES
IMPLEMENTATIONS IronPython
VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS}
COMPILER)
find_program (_${_PYTHON_PREFIX}_COMPILER
NAMES ${_${_PYTHON_PREFIX}_COMPILER_NAMES}
HINTS ${_${_PYTHON_PREFIX}_IRON_ROOT} ${_${_PYTHON_PREFIX}_HINTS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES})
_python_validate_compiler ()
endif()
endif()
# Probe the found compiler: compile and run a tiny version-reporting program
# to confirm the compiler is usable and to learn its Python version, then
# finalize Compiler_FOUND and cache the compiler signature.
set (${_PYTHON_PREFIX}_COMPILER "${_${_PYTHON_PREFIX}_COMPILER}")
if (_${_PYTHON_PREFIX}_COMPILER)
# retrieve python environment version from compiler
_python_get_launcher (_${_PYTHON_PREFIX}_COMPILER_LAUNCHER COMPILER)
set (_${_PYTHON_PREFIX}_VERSION_DIR "${CMAKE_CURRENT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PythonCompilerVersion.dir")
file (WRITE "${_${_PYTHON_PREFIX}_VERSION_DIR}/version.py" "import sys; sys.stdout.write('.'.join([str(x) for x in sys.version_info[:3]]))\n")
# compile version.py into a self-contained executable
execute_process (COMMAND ${_${_PYTHON_PREFIX}_COMPILER_LAUNCHER} "${_${_PYTHON_PREFIX}_COMPILER}"
${_${_PYTHON_PREFIX}_IRON_PYTHON_COMPILER_ARCH_FLAGS}
/target:exe /embed "${_${_PYTHON_PREFIX}_VERSION_DIR}/version.py"
WORKING_DIRECTORY "${_${_PYTHON_PREFIX}_VERSION_DIR}"
OUTPUT_QUIET
ERROR_QUIET)
get_filename_component (_${_PYTHON_PREFIX}_IR_DIR "${_${_PYTHON_PREFIX}_COMPILER}" DIRECTORY)
# run the produced executable (MONO_PATH lets mono resolve IronPython assemblies)
execute_process (COMMAND "${CMAKE_COMMAND}" -E env "MONO_PATH=${_${_PYTHON_PREFIX}_IR_DIR}"
${${_PYTHON_PREFIX}_DOTNET_LAUNCHER} "${_${_PYTHON_PREFIX}_VERSION_DIR}/version.exe"
WORKING_DIRECTORY "${_${_PYTHON_PREFIX}_VERSION_DIR}"
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE _${_PYTHON_PREFIX}_VERSION
ERROR_QUIET)
if (NOT _${_PYTHON_PREFIX}_RESULT)
set (_${_PYTHON_PREFIX}_COMPILER_USABLE TRUE)
string (REGEX MATCHALL "[0-9]+" _${_PYTHON_PREFIX}_VERSIONS "${_${_PYTHON_PREFIX}_VERSION}")
list (GET _${_PYTHON_PREFIX}_VERSIONS 0 _${_PYTHON_PREFIX}_VERSION_MAJOR)
list (GET _${_PYTHON_PREFIX}_VERSIONS 1 _${_PYTHON_PREFIX}_VERSION_MINOR)
list (GET _${_PYTHON_PREFIX}_VERSIONS 2 _${_PYTHON_PREFIX}_VERSION_PATCH)
if (NOT ${_PYTHON_PREFIX}_Interpreter_FOUND)
# set public version information
set (${_PYTHON_PREFIX}_VERSION ${_${_PYTHON_PREFIX}_VERSION})
set (${_PYTHON_PREFIX}_VERSION_MAJOR ${_${_PYTHON_PREFIX}_VERSION_MAJOR})
set (${_PYTHON_PREFIX}_VERSION_MINOR ${_${_PYTHON_PREFIX}_VERSION_MINOR})
set (${_PYTHON_PREFIX}_VERSION_PATCH ${_${_PYTHON_PREFIX}_VERSION_PATCH})
endif()
else()
# compiler not usable
set (_${_PYTHON_PREFIX}_COMPILER_USABLE FALSE)
set (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE "Cannot run the compiler \"${_${_PYTHON_PREFIX}_COMPILER}\"")
endif()
file (REMOVE_RECURSE "${_${_PYTHON_PREFIX}_VERSION_DIR}")
endif()
if (_${_PYTHON_PREFIX}_COMPILER AND _${_PYTHON_PREFIX}_COMPILER_USABLE)
if (${_PYTHON_PREFIX}_Interpreter_FOUND)
# Compiler must be compatible with interpreter
if ("${_${_PYTHON_PREFIX}_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_VERSION_MINOR}" VERSION_EQUAL "${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR}")
set (${_PYTHON_PREFIX}_Compiler_FOUND TRUE)
endif()
elseif (${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
set (${_PYTHON_PREFIX}_Compiler_FOUND TRUE)
# Use compiler version for future searches to ensure consistency
set (_${_PYTHON_PREFIX}_FIND_VERSIONS ${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR})
endif()
endif()
if (${_PYTHON_PREFIX}_Compiler_FOUND)
unset (_${_PYTHON_PREFIX}_Compiler_REASON_FAILURE)
# compute and save compiler signature
string (MD5 __${_PYTHON_PREFIX}_COMPILER_SIGNATURE "${_${_PYTHON_PREFIX}_SIGNATURE}:${_${_PYTHON_PREFIX}_COMPILER}")
set (_${_PYTHON_PREFIX}_COMPILER_SIGNATURE "${__${_PYTHON_PREFIX}_COMPILER_SIGNATURE}" CACHE INTERNAL "")
set (${_PYTHON_PREFIX}_COMPILER_ID IronPython)
else()
unset (_${_PYTHON_PREFIX}_COMPILER_SIGNATURE CACHE)
unset (${_PYTHON_PREFIX}_COMPILER_ID)
endif()
if (${_PYTHON_PREFIX}_ARTIFACTS_INTERACTIVE)
set (${_PYTHON_PREFIX}_COMPILER "${_${_PYTHON_PREFIX}_COMPILER}" CACHE FILEPATH "${_PYTHON_PREFIX} Compiler")
endif()
_python_mark_as_internal (_${_PYTHON_PREFIX}_COMPILER
_${_PYTHON_PREFIX}_COMPILER_SIGNATURE)
endif()
# third step, search for the development artifacts
# Register which result variables are required for the requested
# Development.Module / Development.Embed components.
if (${_PYTHON_PREFIX}_FIND_REQUIRED_Development.Module)
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_MODULE_ARTIFACTS)
list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_LIBRARIES)
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_MODULE_ARTIFACTS)
list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_INCLUDE_DIRS)
endif()
endif()
if (${_PYTHON_PREFIX}_FIND_REQUIRED_Development.Embed)
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_EMBED_ARTIFACTS)
list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_LIBRARIES)
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_EMBED_ARTIFACTS)
list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_INCLUDE_DIRS)
endif()
endif()
list (REMOVE_DUPLICATES _${_PYTHON_PREFIX}_REQUIRED_VARS)
## Development environment is not compatible with IronPython interpreter
if (("Development.Module" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
OR "Development.Embed" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS)
AND ((${_PYTHON_PREFIX}_Interpreter_FOUND
AND NOT ${_PYTHON_PREFIX}_INTERPRETER_ID STREQUAL "IronPython")
OR NOT ${_PYTHON_PREFIX}_Interpreter_FOUND))
if (${_PYTHON_PREFIX}_Interpreter_FOUND)
# reduce possible implementations to the interpreter one
if (${_PYTHON_PREFIX}_INTERPRETER_ID STREQUAL "PyPy")
set (_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS "PyPy")
else()
set (_${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS "CPython")
endif()
else()
list (REMOVE_ITEM _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS "IronPython")
endif()
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS _${_PYTHON_PREFIX}_LIBRARY_RELEASE
_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
_${_PYTHON_PREFIX}_LIBRARY_DEBUG
_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG)
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS _${_PYTHON_PREFIX}_INCLUDE_DIR)
endif()
_python_check_development_signature (Module)
_python_check_development_signature (Embed)
# Honor user-supplied absolute library/include locations over any search.
if (DEFINED ${_PYTHON_PREFIX}_LIBRARY
AND IS_ABSOLUTE "${${_PYTHON_PREFIX}_LIBRARY}")
set (_${_PYTHON_PREFIX}_LIBRARY_RELEASE "${${_PYTHON_PREFIX}_LIBRARY}" CACHE INTERNAL "")
unset (_${_PYTHON_PREFIX}_LIBRARY_DEBUG CACHE)
unset (_${_PYTHON_PREFIX}_INCLUDE_DIR CACHE)
endif()
if (DEFINED ${_PYTHON_PREFIX}_INCLUDE_DIR
AND IS_ABSOLUTE "${${_PYTHON_PREFIX}_INCLUDE_DIR}")
set (_${_PYTHON_PREFIX}_INCLUDE_DIR "${${_PYTHON_PREFIX}_INCLUDE_DIR}" CACHE INTERNAL "")
endif()
# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
unset (_${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES)
if (DEFINED ${_PYTHON_PREFIX}_USE_STATIC_LIBS AND NOT WIN32)
set(_${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
if(${_PYTHON_PREFIX}_USE_STATIC_LIBS)
set (CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX})
else()
list (REMOVE_ITEM CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX})
endif()
endif()
# When library or include dir is still unknown, try to locate a compatible
# pythonX.Y-config tool (only meaningful for CPython, and only when no usable
# interpreter is available or we are cross-compiling).
if (NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE OR NOT _${_PYTHON_PREFIX}_INCLUDE_DIR)
# if python interpreter is found, use it to look-up for artifacts
# to ensure consistency between interpreter and development environments.
# If not, try to locate a compatible config tool
if ((NOT ${_PYTHON_PREFIX}_Interpreter_FOUND OR CMAKE_CROSSCOMPILING)
AND "CPython" IN_LIST _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
set (_${_PYTHON_PREFIX}_HINTS "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
unset (_${_PYTHON_PREFIX}_VIRTUALENV_PATHS)
if (_${_PYTHON_PREFIX}_FIND_VIRTUALENV MATCHES "^(FIRST|ONLY)$")
set (_${_PYTHON_PREFIX}_VIRTUALENV_PATHS ENV VIRTUAL_ENV ENV CONDA_PREFIX)
endif()
if (_${_PYTHON_PREFIX}_FIND_STRATEGY STREQUAL "LOCATION")
_python_get_names (_${_PYTHON_PREFIX}_CONFIG_NAMES VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS} POSIX CONFIG)
# Framework Paths
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS})
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_CONFIG
NAMES ${_${_PYTHON_PREFIX}_CONFIG_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES bin
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
find_program (_${_PYTHON_PREFIX}_CONFIG
NAMES ${_${_PYTHON_PREFIX}_CONFIG_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
PATH_SUFFIXES bin)
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_CONFIG
NAMES ${_${_PYTHON_PREFIX}_CONFIG_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES bin
NO_DEFAULT_PATH)
endif()
# sanity check: the tool must at least answer --help
if (_${_PYTHON_PREFIX}_CONFIG)
execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --help
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE __${_PYTHON_PREFIX}_HELP
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_${_PYTHON_PREFIX}_RESULT)
# assume config tool is not usable
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
endif()
endif()
# reject config tools whose ABI flags do not match the requested ABI
if (_${_PYTHON_PREFIX}_CONFIG)
execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --abiflags
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE __${_PYTHON_PREFIX}_ABIFLAGS
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_${_PYTHON_PREFIX}_RESULT)
# assume ABI is not supported
set (__${_PYTHON_PREFIX}_ABIFLAGS "")
endif()
if (DEFINED _${_PYTHON_PREFIX}_FIND_ABI AND NOT __${_PYTHON_PREFIX}_ABIFLAGS IN_LIST _${_PYTHON_PREFIX}_ABIFLAGS)
# Wrong ABI
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
endif()
endif()
if (_${_PYTHON_PREFIX}_CONFIG AND DEFINED CMAKE_LIBRARY_ARCHITECTURE)
# check that config tool match library architecture
execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --configdir
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE _${_PYTHON_PREFIX}_CONFIGDIR
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_${_PYTHON_PREFIX}_RESULT)
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
else()
string(FIND "${_${_PYTHON_PREFIX}_CONFIGDIR}" "${CMAKE_LIBRARY_ARCHITECTURE}" _${_PYTHON_PREFIX}_RESULT)
if (_${_PYTHON_PREFIX}_RESULT EQUAL -1)
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
endif()
endif()
endif()
else()
# VERSION strategy: probe a pythonX.Y-config tool per requested version,
# rejecting candidates that fail --help, report a wrong ABI, or do not
# match the library architecture; stop at the first acceptable tool.
foreach (_${_PYTHON_PREFIX}_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
# try to use pythonX.Y-config tool
_python_get_names (_${_PYTHON_PREFIX}_CONFIG_NAMES VERSION ${_${_PYTHON_PREFIX}_VERSION} POSIX CONFIG)
# Framework Paths
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS VERSION ${_${_PYTHON_PREFIX}_VERSION})
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_program (_${_PYTHON_PREFIX}_CONFIG
NAMES ${_${_PYTHON_PREFIX}_CONFIG_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES bin
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
find_program (_${_PYTHON_PREFIX}_CONFIG
NAMES ${_${_PYTHON_PREFIX}_CONFIG_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
PATH_SUFFIXES bin)
# Apple frameworks handling
if (CMAKE_HOST_APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
find_program (_${_PYTHON_PREFIX}_CONFIG
NAMES ${_${_PYTHON_PREFIX}_CONFIG_NAMES}
NAMES_PER_DIR
PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES bin
NO_DEFAULT_PATH)
endif()
unset (_${_PYTHON_PREFIX}_CONFIG_NAMES)
if (_${_PYTHON_PREFIX}_CONFIG)
execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --help
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE __${_PYTHON_PREFIX}_HELP
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_${_PYTHON_PREFIX}_RESULT)
# assume config tool is not usable
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
endif()
endif()
if (NOT _${_PYTHON_PREFIX}_CONFIG)
continue()
endif()
execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --abiflags
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE __${_PYTHON_PREFIX}_ABIFLAGS
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_${_PYTHON_PREFIX}_RESULT)
# assume ABI is not supported
set (__${_PYTHON_PREFIX}_ABIFLAGS "")
endif()
if (DEFINED _${_PYTHON_PREFIX}_FIND_ABI AND NOT __${_PYTHON_PREFIX}_ABIFLAGS IN_LIST _${_PYTHON_PREFIX}_ABIFLAGS)
# Wrong ABI
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
continue()
endif()
if (_${_PYTHON_PREFIX}_CONFIG AND DEFINED CMAKE_LIBRARY_ARCHITECTURE)
# check that config tool match library architecture
execute_process (COMMAND "${_${_PYTHON_PREFIX}_CONFIG}" --configdir
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE _${_PYTHON_PREFIX}_CONFIGDIR
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (_${_PYTHON_PREFIX}_RESULT)
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
continue()
endif()
string (FIND "${_${_PYTHON_PREFIX}_CONFIGDIR}" "${CMAKE_LIBRARY_ARCHITECTURE}" _${_PYTHON_PREFIX}_RESULT)
if (_${_PYTHON_PREFIX}_RESULT EQUAL -1)
unset (_${_PYTHON_PREFIX}_CONFIG CACHE)
continue()
endif()
endif()
if (_${_PYTHON_PREFIX}_CONFIG)
break()
endif()
endforeach()
endif()
endif()
endif()
# Locate the Python library, preferring hints derived from the interpreter
# or the config tool (PREFIX/CONFIGDIR) for consistency with the runtime.
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
if (NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
if ((${_PYTHON_PREFIX}_Interpreter_FOUND AND NOT CMAKE_CROSSCOMPILING) OR _${_PYTHON_PREFIX}_CONFIG)
# retrieve root install directory
_python_get_config_var (_${_PYTHON_PREFIX}_PREFIX PREFIX)
# enforce current ABI
_python_get_config_var (_${_PYTHON_PREFIX}_ABIFLAGS ABIFLAGS)
set (_${_PYTHON_PREFIX}_HINTS "${_${_PYTHON_PREFIX}_PREFIX}")
# retrieve library
## compute some paths and artifact names
if (_${_PYTHON_PREFIX}_CONFIG)
# derive the version from the config tool's own name (pythonX.Y-config)
string (REGEX REPLACE "^.+python([0-9.]+)[a-z]*-config" "\\1" _${_PYTHON_PREFIX}_VERSION "${_${_PYTHON_PREFIX}_CONFIG}")
else()
set (_${_PYTHON_PREFIX}_VERSION "${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR}")
endif()
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES VERSION ${_${_PYTHON_PREFIX}_VERSION} LIBRARY)
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES VERSION ${_${_PYTHON_PREFIX}_VERSION} WIN32 POSIX LIBRARY)
_python_get_config_var (_${_PYTHON_PREFIX}_CONFIGDIR CONFIGDIR)
list (APPEND _${_PYTHON_PREFIX}_HINTS "${_${_PYTHON_PREFIX}_CONFIGDIR}")
list (APPEND _${_PYTHON_PREFIX}_HINTS "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
# Rely on HINTS and standard paths if interpreter or config tool failed to locate artifacts
if (NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
set (_${_PYTHON_PREFIX}_HINTS "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
unset (_${_PYTHON_PREFIX}_VIRTUALENV_PATHS)
if (_${_PYTHON_PREFIX}_FIND_VIRTUALENV MATCHES "^(FIRST|ONLY)$")
set (_${_PYTHON_PREFIX}_VIRTUALENV_PATHS ENV VIRTUAL_ENV ENV CONDA_PREFIX)
endif()
if (_${_PYTHON_PREFIX}_FIND_STRATEGY STREQUAL "LOCATION")
# Compute library candidate names and search locations for all requested
# versions (LOCATION strategy).
# library names
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS} WIN32 POSIX LIBRARY)
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES_DEBUG VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS} WIN32 DEBUG)
# Paths suffixes
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS} LIBRARY)
# Framework Paths
# FIX: previously read the undefined variable _..._LIB_FIND_VERSIONS (set
# nowhere in this module), yielding an empty version list and thus missing
# framework locations; use _..._FIND_VERSIONS like every sibling call here.
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS})
# Registry Paths
_python_get_registries (_${_PYTHON_PREFIX}_REGISTRY_PATHS VERSION ${_${_PYTHON_PREFIX}_FIND_VERSIONS} )
# find_library cascade honoring framework/registry preference: FIRST
# locations, HINTS/virtualenv, then default paths (optionally including
# LAST framework/registry locations).
if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
# search in HINTS locations
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
set (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS})
else()
unset (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS)
endif()
if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
set (__${_PYTHON_PREFIX}_REGISTRY_PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS})
else()
unset (__${_PYTHON_PREFIX}_REGISTRY_PATHS)
endif()
# search in all default paths
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
PATHS ${__${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
${__${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES})
else()
# VERSION strategy: repeat the same cascade once per requested version,
# stopping at the first version for which a library is found.
foreach (_${_PYTHON_PREFIX}_LIB_VERSION IN LISTS _${_PYTHON_PREFIX}_FIND_VERSIONS)
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES VERSION ${_${_PYTHON_PREFIX}_LIB_VERSION} WIN32 POSIX LIBRARY)
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES_DEBUG VERSION ${_${_PYTHON_PREFIX}_LIB_VERSION} WIN32 DEBUG)
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS VERSION ${_${_PYTHON_PREFIX}_LIB_VERSION})
_python_get_registries (_${_PYTHON_PREFIX}_REGISTRY_PATHS VERSION ${_${_PYTHON_PREFIX}_LIB_VERSION})
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES VERSION ${_${_PYTHON_PREFIX}_LIB_VERSION} LIBRARY)
if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
# search in HINTS locations
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
set (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS})
else()
unset (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS)
endif()
if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
set (__${_PYTHON_PREFIX}_REGISTRY_PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS})
else()
unset (__${_PYTHON_PREFIX}_REGISTRY_PATHS)
endif()
# search in all default paths
find_library (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
PATHS ${__${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
${__${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES})
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE)
break()
endif()
endforeach()
endif()
endif()
endif()
# finalize library version information
_python_get_version (LIBRARY PREFIX _${_PYTHON_PREFIX}_)
if (_${_PYTHON_PREFIX}_VERSION EQUAL "${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}")
# not able to extract full version from library name
if (${_PYTHON_PREFIX}_Interpreter_FOUND)
# update from interpreter
set (_${_PYTHON_PREFIX}_VERSION ${${_PYTHON_PREFIX}_VERSION})
set (_${_PYTHON_PREFIX}_VERSION_MAJOR ${${_PYTHON_PREFIX}_VERSION_MAJOR})
set (_${_PYTHON_PREFIX}_VERSION_MINOR ${${_PYTHON_PREFIX}_VERSION_MINOR})
set (_${_PYTHON_PREFIX}_VERSION_PATCH ${${_PYTHON_PREFIX}_VERSION_PATCH})
endif()
endif()
set (${_PYTHON_PREFIX}_LIBRARY_RELEASE "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}")
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE AND NOT EXISTS "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}")
set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Cannot find the library \"${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}\"")
set_property (CACHE _${_PYTHON_PREFIX}_LIBRARY_RELEASE PROPERTY VALUE "${_PYTHON_PREFIX}_LIBRARY_RELEASE-NOTFOUND")
endif()
set (_${_PYTHON_PREFIX}_HINTS "${${_PYTHON_PREFIX}_ROOT_DIR}" ENV ${_PYTHON_PREFIX}_ROOT_DIR)
if (WIN32 AND _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
# search for debug library
# use release library location as a hint
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES_DEBUG VERSION ${_${_PYTHON_PREFIX}_VERSION} WIN32 DEBUG)
get_filename_component (_${_PYTHON_PREFIX}_PATH "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}" DIRECTORY)
find_library (_${_PYTHON_PREFIX}_LIBRARY_DEBUG
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES_DEBUG}
NAMES_PER_DIR
HINTS "${_${_PYTHON_PREFIX}_PATH}" ${_${_PYTHON_PREFIX}_HINTS}
NO_DEFAULT_PATH)
# second try, including CMake variables, to catch non-conventional layouts
find_library (_${_PYTHON_PREFIX}_LIBRARY_DEBUG
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES_DEBUG}
NAMES_PER_DIR
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
# retrieve runtime libraries
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE)
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES VERSION ${_${_PYTHON_PREFIX}_VERSION} WIN32 POSIX LIBRARY)
get_filename_component (_${_PYTHON_PREFIX}_PATH "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}" DIRECTORY)
get_filename_component (_${_PYTHON_PREFIX}_PATH2 "${_${_PYTHON_PREFIX}_PATH}" DIRECTORY)
_python_find_runtime_library (_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES}
NAMES_PER_DIR
HINTS "${_${_PYTHON_PREFIX}_PATH}"
"${_${_PYTHON_PREFIX}_PATH2}" ${_${_PYTHON_PREFIX}_HINTS}
PATH_SUFFIXES bin)
endif()
if (_${_PYTHON_PREFIX}_LIBRARY_DEBUG)
_python_get_names (_${_PYTHON_PREFIX}_LIB_NAMES_DEBUG VERSION ${_${_PYTHON_PREFIX}_VERSION} WIN32 DEBUG)
get_filename_component (_${_PYTHON_PREFIX}_PATH "${_${_PYTHON_PREFIX}_LIBRARY_DEBUG}" DIRECTORY)
get_filename_component (_${_PYTHON_PREFIX}_PATH2 "${_${_PYTHON_PREFIX}_PATH}" DIRECTORY)
_python_find_runtime_library (_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG
NAMES ${_${_PYTHON_PREFIX}_LIB_NAMES_DEBUG}
NAMES_PER_DIR
HINTS "${_${_PYTHON_PREFIX}_PATH}"
"${_${_PYTHON_PREFIX}_PATH2}" ${_${_PYTHON_PREFIX}_HINTS}
PATH_SUFFIXES bin)
endif()
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
while (NOT _${_PYTHON_PREFIX}_INCLUDE_DIR)
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS
AND NOT _${_PYTHON_PREFIX}_LIBRARY_RELEASE)
# Don't search for include dir if no library was found
break()
endif()
if ((${_PYTHON_PREFIX}_Interpreter_FOUND AND NOT CMAKE_CROSSCOMPILING) OR _${_PYTHON_PREFIX}_CONFIG)
_python_get_config_var (_${_PYTHON_PREFIX}_INCLUDE_DIRS INCLUDES)
find_path (_${_PYTHON_PREFIX}_INCLUDE_DIR
NAMES ${_${_PYTHON_PREFIX}_INCLUDE_NAMES}
HINTS ${_${_PYTHON_PREFIX}_INCLUDE_DIRS}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
# Rely on HINTS and standard paths if interpreter or config tool failed to locate artifacts
if (NOT _${_PYTHON_PREFIX}_INCLUDE_DIR)
unset (_${_PYTHON_PREFIX}_VIRTUALENV_PATHS)
if (_${_PYTHON_PREFIX}_FIND_VIRTUALENV MATCHES "^(FIRST|ONLY)$")
set (_${_PYTHON_PREFIX}_VIRTUALENV_PATHS ENV VIRTUAL_ENV ENV CONDA_PREFIX)
endif()
unset (_${_PYTHON_PREFIX}_INCLUDE_HINTS)
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE)
# Use the library's install prefix as a hint
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE MATCHES "^(.+/Frameworks/Python.framework/Versions/[0-9.]+)")
list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${CMAKE_MATCH_1}")
elseif (_${_PYTHON_PREFIX}_LIBRARY_RELEASE MATCHES "^(.+)/lib(64|32)?/python[0-9.]+/config")
list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${CMAKE_MATCH_1}")
elseif (DEFINED CMAKE_LIBRARY_ARCHITECTURE AND ${_${_PYTHON_PREFIX}_LIBRARY_RELEASE} MATCHES "^(.+)/lib/${CMAKE_LIBRARY_ARCHITECTURE}")
list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${CMAKE_MATCH_1}")
else()
# assume library is in a directory under root
get_filename_component (_${_PYTHON_PREFIX}_PREFIX "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}" DIRECTORY)
get_filename_component (_${_PYTHON_PREFIX}_PREFIX "${_${_PYTHON_PREFIX}_PREFIX}" DIRECTORY)
list (APPEND _${_PYTHON_PREFIX}_INCLUDE_HINTS "${_${_PYTHON_PREFIX}_PREFIX}")
endif()
endif()
_python_get_frameworks (_${_PYTHON_PREFIX}_FRAMEWORK_PATHS VERSION ${_${_PYTHON_PREFIX}_VERSION})
_python_get_registries (_${_PYTHON_PREFIX}_REGISTRY_PATHS VERSION ${_${_PYTHON_PREFIX}_VERSION})
_python_get_path_suffixes (_${_PYTHON_PREFIX}_PATH_SUFFIXES VERSION ${_${_PYTHON_PREFIX}_VERSION} INCLUDE)
if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "FIRST")
find_path (_${_PYTHON_PREFIX}_INCLUDE_DIR
NAMES ${_${_PYTHON_PREFIX}_INCLUDE_NAMES}
HINTS ${_${_PYTHON_PREFIX}_INCLUDE_HINTS} ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "FIRST")
find_path (_${_PYTHON_PREFIX}_INCLUDE_DIR
NAMES ${_${_PYTHON_PREFIX}_INCLUDE_NAMES}
HINTS ${_${_PYTHON_PREFIX}_INCLUDE_HINTS} ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${_${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
if (APPLE AND _${_PYTHON_PREFIX}_FIND_FRAMEWORK STREQUAL "LAST")
set (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS ${_${_PYTHON_PREFIX}_FRAMEWORK_PATHS})
else()
unset (__${_PYTHON_PREFIX}_FRAMEWORK_PATHS)
endif()
if (WIN32 AND _${_PYTHON_PREFIX}_FIND_REGISTRY STREQUAL "LAST")
set (__${_PYTHON_PREFIX}_REGISTRY_PATHS ${_${_PYTHON_PREFIX}_REGISTRY_PATHS})
else()
unset (__${_PYTHON_PREFIX}_REGISTRY_PATHS)
endif()
find_path (_${_PYTHON_PREFIX}_INCLUDE_DIR
NAMES ${_${_PYTHON_PREFIX}_INCLUDE_NAMES}
HINTS ${_${_PYTHON_PREFIX}_INCLUDE_HINTS} ${_${_PYTHON_PREFIX}_HINTS}
PATHS ${_${_PYTHON_PREFIX}_VIRTUALENV_PATHS}
${__${_PYTHON_PREFIX}_FRAMEWORK_PATHS}
${__${_PYTHON_PREFIX}_REGISTRY_PATHS}
PATH_SUFFIXES ${_${_PYTHON_PREFIX}_PATH_SUFFIXES}
NO_SYSTEM_ENVIRONMENT_PATH
NO_CMAKE_SYSTEM_PATH)
endif()
# search header file in standard locations
find_path (_${_PYTHON_PREFIX}_INCLUDE_DIR
NAMES ${_${_PYTHON_PREFIX}_INCLUDE_NAMES})
break()
endwhile()
set (${_PYTHON_PREFIX}_INCLUDE_DIRS "${_${_PYTHON_PREFIX}_INCLUDE_DIR}")
if (_${_PYTHON_PREFIX}_INCLUDE_DIR AND NOT EXISTS "${_${_PYTHON_PREFIX}_INCLUDE_DIR}")
set (_${_PYTHON_PREFIX}_Development_REASON_FAILURE "Cannot find the directory \"${_${_PYTHON_PREFIX}_INCLUDE_DIR}\"")
set_property (CACHE _${_PYTHON_PREFIX}_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_INCLUDE_DIR-NOTFOUND")
endif()
if (_${_PYTHON_PREFIX}_INCLUDE_DIR)
# retrieve version from header file
_python_get_version (INCLUDE PREFIX _${_PYTHON_PREFIX}_INC_)
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE)
if ("${_${_PYTHON_PREFIX}_INC_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_INC_VERSION_MINOR}"
VERSION_EQUAL _${_PYTHON_PREFIX}_VERSION)
# update versioning
set (_${_PYTHON_PREFIX}_VERSION ${_${_PYTHON_PREFIX}_INC_VERSION})
set (_${_PYTHON_PREFIX}_VERSION_PATCH ${_${_PYTHON_PREFIX}_INC_VERSION_PATCH})
endif()
else()
set (_${_PYTHON_PREFIX}_VERSION ${_${_PYTHON_PREFIX}_INC_VERSION})
set (_${_PYTHON_PREFIX}_VERSION_MAJOR ${_${_PYTHON_PREFIX}_INC_VERSION_MAJOR})
set (_${_PYTHON_PREFIX}_VERSION_MINOR ${_${_PYTHON_PREFIX}_INC_VERSION_MINOR})
set (_${_PYTHON_PREFIX}_VERSION_PATCH ${_${_PYTHON_PREFIX}_INC_VERSION_PATCH})
endif()
endif()
endif()
if (NOT ${_PYTHON_PREFIX}_Interpreter_FOUND AND NOT ${_PYTHON_PREFIX}_Compiler_FOUND)
# set public version information
set (${_PYTHON_PREFIX}_VERSION ${_${_PYTHON_PREFIX}_VERSION})
set (${_PYTHON_PREFIX}_VERSION_MAJOR ${_${_PYTHON_PREFIX}_VERSION_MAJOR})
set (${_PYTHON_PREFIX}_VERSION_MINOR ${_${_PYTHON_PREFIX}_VERSION_MINOR})
set (${_PYTHON_PREFIX}_VERSION_PATCH ${_${_PYTHON_PREFIX}_VERSION_PATCH})
endif()
# define public variables
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
set (${_PYTHON_PREFIX}_LIBRARY_DEBUG "${_${_PYTHON_PREFIX}_LIBRARY_DEBUG}")
_python_select_library_configurations (${_PYTHON_PREFIX})
set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE "${_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE}")
set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG "${_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG}")
if (_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE)
set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY "${_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE}")
elseif (_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG)
set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY "${_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG}")
else()
set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY "${_PYTHON_PREFIX}_RUNTIME_LIBRARY-NOTFOUND")
endif()
_python_set_library_dirs (${_PYTHON_PREFIX}_LIBRARY_DIRS
_${_PYTHON_PREFIX}_LIBRARY_RELEASE
_${_PYTHON_PREFIX}_LIBRARY_DEBUG)
if (UNIX)
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE MATCHES "${CMAKE_SHARED_LIBRARY_SUFFIX}$")
set (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DIRS ${${_PYTHON_PREFIX}_LIBRARY_DIRS})
endif()
else()
_python_set_library_dirs (${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DIRS
_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG)
endif()
endif()
if (_${_PYTHON_PREFIX}_LIBRARY_RELEASE OR _${_PYTHON_PREFIX}_INCLUDE_DIR)
if (${_PYTHON_PREFIX}_Interpreter_FOUND OR ${_PYTHON_PREFIX}_Compiler_FOUND)
# development environment must be compatible with interpreter/compiler
if ("${_${_PYTHON_PREFIX}_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_VERSION_MINOR}" VERSION_EQUAL "${${_PYTHON_PREFIX}_VERSION_MAJOR}.${${_PYTHON_PREFIX}_VERSION_MINOR}"
AND "${_${_PYTHON_PREFIX}_INC_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_INC_VERSION_MINOR}" VERSION_EQUAL "${_${_PYTHON_PREFIX}_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_VERSION_MINOR}")
_python_set_development_module_found (Module)
_python_set_development_module_found (Embed)
endif()
elseif (${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR
AND "${_${_PYTHON_PREFIX}_INC_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_INC_VERSION_MINOR}" VERSION_EQUAL "${_${_PYTHON_PREFIX}_VERSION_MAJOR}.${_${_PYTHON_PREFIX}_VERSION_MINOR}")
_python_set_development_module_found (Module)
_python_set_development_module_found (Embed)
endif()
if (DEFINED _${_PYTHON_PREFIX}_FIND_ABI AND
(NOT _${_PYTHON_PREFIX}_ABI IN_LIST _${_PYTHON_PREFIX}_ABIFLAGS
OR NOT _${_PYTHON_PREFIX}_INC_ABI IN_LIST _${_PYTHON_PREFIX}_ABIFLAGS))
set (${_PYTHON_PREFIX}_Development.Module_FOUND FALSE)
set (${_PYTHON_PREFIX}_Development.Embed_FOUND FALSE)
endif()
endif()
if (( ${_PYTHON_PREFIX}_Development.Module_FOUND
AND ${_PYTHON_PREFIX}_Development.Embed_FOUND)
OR (NOT "Development.Module" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
AND ${_PYTHON_PREFIX}_Development.Embed_FOUND)
OR (NOT "Development.Embed" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
AND ${_PYTHON_PREFIX}_Development.Module_FOUND))
unset (_${_PYTHON_PREFIX}_Development_REASON_FAILURE)
endif()
if ("Development" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
AND ${_PYTHON_PREFIX}_Development.Module_FOUND
AND ${_PYTHON_PREFIX}_Development.Embed_FOUND)
set (${_PYTHON_PREFIX}_Development_FOUND TRUE)
endif()
if ((${_PYTHON_PREFIX}_Development.Module_FOUND
OR ${_PYTHON_PREFIX}_Development.Embed_FOUND)
AND EXISTS "${_${_PYTHON_PREFIX}_INCLUDE_DIR}/PyPy.h")
# retrieve PyPy version
file (STRINGS "${_${_PYTHON_PREFIX}_INCLUDE_DIR}/patchlevel.h" ${_PYTHON_PREFIX}_PyPy_VERSION
REGEX "^#define[ \t]+PYPY_VERSION[ \t]+\"[^\"]+\"")
string (REGEX REPLACE "^#define[ \t]+PYPY_VERSION[ \t]+\"([^\"]+)\".*" "\\1"
${_PYTHON_PREFIX}_PyPy_VERSION "${${_PYTHON_PREFIX}_PyPy_VERSION}")
endif()
unset(${_PYTHON_PREFIX}_LINK_OPTIONS)
if (${_PYTHON_PREFIX}_Development.Embed_FOUND AND APPLE
AND ${_PYTHON_PREFIX}_LIBRARY_RELEASE MATCHES "${CMAKE_SHARED_LIBRARY_SUFFIX}$")
# rpath must be specified if python is part of a framework
unset(_${_PYTHON_PREFIX}_is_prefix)
foreach (_${_PYTHON_PREFIX}_implementation IN LISTS _${_PYTHON_PREFIX}_FIND_IMPLEMENTATIONS)
foreach (_${_PYTHON_PREFIX}_framework IN LISTS _${_PYTHON_PREFIX}_${_${_PYTHON_PREFIX}_implementation}_FRAMEWORKS)
cmake_path (IS_PREFIX _${_PYTHON_PREFIX}_framework "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}" _${_PYTHON_PREFIX}_is_prefix)
if (_${_PYTHON_PREFIX}_is_prefix)
cmake_path (GET _${_PYTHON_PREFIX}_framework PARENT_PATH _${_PYTHON_PREFIX}_framework)
set (${_PYTHON_PREFIX}_LINK_OPTIONS "LINKER:-rpath,${_${_PYTHON_PREFIX}_framework}")
break()
endif()
endforeach()
if (_${_PYTHON_PREFIX}_is_prefix)
break()
endif()
endforeach()
unset(_${_PYTHON_PREFIX}_implementation)
unset(_${_PYTHON_PREFIX}_framework)
unset(_${_PYTHON_PREFIX}_is_prefix)
endif()
if (NOT DEFINED ${_PYTHON_PREFIX}_SOABI)
_python_get_config_var (${_PYTHON_PREFIX}_SOABI SOABI)
endif()
_python_compute_development_signature (Module)
_python_compute_development_signature (Embed)
# Restore the original find library ordering
if (DEFINED _${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES)
set (CMAKE_FIND_LIBRARY_SUFFIXES ${_${_PYTHON_PREFIX}_CMAKE_FIND_LIBRARY_SUFFIXES})
endif()
if (${_PYTHON_PREFIX}_ARTIFACTS_INTERACTIVE)
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
set (${_PYTHON_PREFIX}_LIBRARY "${_${_PYTHON_PREFIX}_LIBRARY_RELEASE}" CACHE FILEPATH "${_PYTHON_PREFIX} Library")
endif()
if ("INCLUDE_DIR" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_ARTIFACTS)
set (${_PYTHON_PREFIX}_INCLUDE_DIR "${_${_PYTHON_PREFIX}_INCLUDE_DIR}" CACHE FILEPATH "${_PYTHON_PREFIX} Include Directory")
endif()
endif()
_python_mark_as_internal (_${_PYTHON_PREFIX}_LIBRARY_RELEASE
_${_PYTHON_PREFIX}_LIBRARY_DEBUG
_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE
_${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG
_${_PYTHON_PREFIX}_INCLUDE_DIR
_${_PYTHON_PREFIX}_CONFIG
_${_PYTHON_PREFIX}_DEVELOPMENT_MODULE_SIGNATURE
_${_PYTHON_PREFIX}_DEVELOPMENT_EMBED_SIGNATURE)
endif()
if (${_PYTHON_PREFIX}_FIND_REQUIRED_NumPy)
list (APPEND _${_PYTHON_PREFIX}_REQUIRED_VARS ${_PYTHON_PREFIX}_NumPy_INCLUDE_DIRS)
endif()
if ("NumPy" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS AND ${_PYTHON_PREFIX}_Interpreter_FOUND)
list (APPEND _${_PYTHON_PREFIX}_CACHED_VARS _${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR)
if (DEFINED ${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR
AND IS_ABSOLUTE "${${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
set (_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR "${${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}" CACHE INTERNAL "")
elseif (DEFINED _${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR)
# compute numpy signature. Depends on interpreter and development signatures
string (MD5 __${_PYTHON_PREFIX}_NUMPY_SIGNATURE "${_${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE}:${_${_PYTHON_PREFIX}_DEVELOPMENT_MODULE_SIGNATURE}:${_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
if (NOT __${_PYTHON_PREFIX}_NUMPY_SIGNATURE STREQUAL _${_PYTHON_PREFIX}_NUMPY_SIGNATURE
OR NOT EXISTS "${_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
unset (_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR CACHE)
unset (_${_PYTHON_PREFIX}_NUMPY_SIGNATURE CACHE)
endif()
endif()
if (NOT _${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR)
execute_process(COMMAND ${${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys\ntry: import numpy; sys.stdout.write(numpy.get_include())\nexcept:pass\n"
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE _${_PYTHON_PREFIX}_NumPy_PATH
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if (NOT _${_PYTHON_PREFIX}_RESULT)
find_path (_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR
NAMES "numpy/arrayobject.h" "numpy/numpyconfig.h"
HINTS "${_${_PYTHON_PREFIX}_NumPy_PATH}"
NO_DEFAULT_PATH)
endif()
endif()
set (${_PYTHON_PREFIX}_NumPy_INCLUDE_DIRS "${_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
if(_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR AND NOT EXISTS "${_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
set (_${_PYTHON_PREFIX}_NumPy_REASON_FAILURE "Cannot find the directory \"${_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}\"")
set_property (CACHE _${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR PROPERTY VALUE "${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR-NOTFOUND")
endif()
if (_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR)
execute_process (COMMAND ${${_PYTHON_PREFIX}_INTERPRETER_LAUNCHER} "${_${_PYTHON_PREFIX}_EXECUTABLE}" -c
"import sys\ntry: import numpy; sys.stdout.write(numpy.__version__)\nexcept:pass\n"
RESULT_VARIABLE _${_PYTHON_PREFIX}_RESULT
OUTPUT_VARIABLE _${_PYTHON_PREFIX}_NumPy_VERSION)
if (NOT _${_PYTHON_PREFIX}_RESULT)
set (${_PYTHON_PREFIX}_NumPy_VERSION "${_${_PYTHON_PREFIX}_NumPy_VERSION}")
else()
unset (${_PYTHON_PREFIX}_NumPy_VERSION)
endif()
# final step: mark NumPy as found only if the Development.Module component is found as well
set(${_PYTHON_PREFIX}_NumPy_FOUND ${${_PYTHON_PREFIX}_Development.Module_FOUND})
else()
set (${_PYTHON_PREFIX}_NumPy_FOUND FALSE)
endif()
if (${_PYTHON_PREFIX}_NumPy_FOUND)
unset (_${_PYTHON_PREFIX}_NumPy_REASON_FAILURE)
# compute and save numpy signature
# NOTE: this must hash the same cache variable used by the signature
# check earlier (_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR). The previous
# spelling ${${_PYTHON_PREFIX}_NumPyINCLUDE_DIR} referenced an undefined
# variable, so the stored signature could never match the recomputed one
# and the cached NumPy include dir was invalidated on every configure.
string (MD5 __${_PYTHON_PREFIX}_NUMPY_SIGNATURE "${_${_PYTHON_PREFIX}_INTERPRETER_SIGNATURE}:${_${_PYTHON_PREFIX}_DEVELOPMENT_MODULE_SIGNATURE}:${_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}")
set (_${_PYTHON_PREFIX}_NUMPY_SIGNATURE "${__${_PYTHON_PREFIX}_NUMPY_SIGNATURE}" CACHE INTERNAL "")
else()
unset (_${_PYTHON_PREFIX}_NUMPY_SIGNATURE CACHE)
endif()
if (${_PYTHON_PREFIX}_ARTIFACTS_INTERACTIVE)
set (${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR "${_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR}" CACHE FILEPATH "${_PYTHON_PREFIX} NumPy Include Directory")
endif()
_python_mark_as_internal (_${_PYTHON_PREFIX}_NumPy_INCLUDE_DIR
_${_PYTHON_PREFIX}_NUMPY_SIGNATURE)
endif()
# final validation
if (${_PYTHON_PREFIX}_VERSION_MAJOR AND
NOT ${_PYTHON_PREFIX}_VERSION_MAJOR VERSION_EQUAL _${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR)
_python_display_failure ("Could NOT find ${_PYTHON_PREFIX}: Found unsuitable major version \"${${_PYTHON_PREFIX}_VERSION_MAJOR}\", but required major version is exact version \"${_${_PYTHON_PREFIX}_REQUIRED_VERSION_MAJOR}\"")
cmake_policy(POP)
return()
endif()
unset (_${_PYTHON_PREFIX}_REASON_FAILURE)
foreach (_${_PYTHON_PREFIX}_COMPONENT IN ITEMS Interpreter Compiler Development NumPy)
if (_${_PYTHON_PREFIX}_${_${_PYTHON_PREFIX}_COMPONENT}_REASON_FAILURE)
string (APPEND _${_PYTHON_PREFIX}_REASON_FAILURE "\n ${_${_PYTHON_PREFIX}_COMPONENT}: ${_${_PYTHON_PREFIX}_${_${_PYTHON_PREFIX}_COMPONENT}_REASON_FAILURE}")
unset (_${_PYTHON_PREFIX}_${_${_PYTHON_PREFIX}_COMPONENT}_REASON_FAILURE)
endif()
endforeach()
find_package_handle_standard_args (${_PYTHON_PREFIX}
REQUIRED_VARS ${_${_PYTHON_PREFIX}_REQUIRED_VARS}
VERSION_VAR ${_PYTHON_PREFIX}_VERSION
HANDLE_VERSION_RANGE
HANDLE_COMPONENTS
REASON_FAILURE_MESSAGE "${_${_PYTHON_PREFIX}_REASON_FAILURE}")
# Create imported targets and helper functions
if(_${_PYTHON_PREFIX}_CMAKE_ROLE STREQUAL "PROJECT")
if ("Interpreter" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
AND ${_PYTHON_PREFIX}_Interpreter_FOUND
AND NOT TARGET ${_PYTHON_PREFIX}::Interpreter)
add_executable (${_PYTHON_PREFIX}::Interpreter IMPORTED)
set_property (TARGET ${_PYTHON_PREFIX}::Interpreter
PROPERTY IMPORTED_LOCATION "${${_PYTHON_PREFIX}_EXECUTABLE}")
endif()
if ("Compiler" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
AND ${_PYTHON_PREFIX}_Compiler_FOUND
AND NOT TARGET ${_PYTHON_PREFIX}::Compiler)
add_executable (${_PYTHON_PREFIX}::Compiler IMPORTED)
set_property (TARGET ${_PYTHON_PREFIX}::Compiler
PROPERTY IMPORTED_LOCATION "${${_PYTHON_PREFIX}_COMPILER}")
endif()
if (("Development.Module" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
AND ${_PYTHON_PREFIX}_Development.Module_FOUND)
OR ("Development.Embed" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS
AND ${_PYTHON_PREFIX}_Development.Embed_FOUND))
# Create (or update) the imported library target ${__name} describing the
# Python library found above, selecting SHARED vs STATIC and wiring up the
# per-configuration locations, include directories and link options.
macro (__PYTHON_IMPORT_LIBRARY __name)
# A shared library is assumed when the release library has the shared
# suffix or when a separate runtime library (DLL) was found; otherwise
# treat it as static.
if (${_PYTHON_PREFIX}_LIBRARY_RELEASE MATCHES "${CMAKE_SHARED_LIBRARY_SUFFIX}$"
OR ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE)
set (_${_PYTHON_PREFIX}_LIBRARY_TYPE SHARED)
else()
set (_${_PYTHON_PREFIX}_LIBRARY_TYPE STATIC)
endif()
if (NOT TARGET ${__name})
add_library (${__name} ${_${_PYTHON_PREFIX}_LIBRARY_TYPE} IMPORTED)
endif()
set_property (TARGET ${__name}
PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${_PYTHON_PREFIX}_INCLUDE_DIRS}")
if (${_PYTHON_PREFIX}_LIBRARY_RELEASE AND ${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE)
# The platform manages shared libraries in two parts: an import library
# (IMPORTED_IMPLIB) and a runtime library (IMPORTED_LOCATION).
if (${_PYTHON_PREFIX}_LIBRARY_RELEASE AND ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
# Both release and debug variants found: expose per-config properties.
set_property (TARGET ${__name} PROPERTY IMPORTED_CONFIGURATIONS RELEASE DEBUG)
set_target_properties (${__name}
PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
IMPORTED_IMPLIB_RELEASE "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}"
IMPORTED_LOCATION_RELEASE "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE}")
set_target_properties (${__name}
PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
IMPORTED_IMPLIB_DEBUG "${${_PYTHON_PREFIX}_LIBRARY_DEBUG}"
IMPORTED_LOCATION_DEBUG "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY_DEBUG}")
else()
# Single-configuration setup.
set_target_properties (${__name}
PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES "C"
IMPORTED_IMPLIB "${${_PYTHON_PREFIX}_LIBRARIES}"
IMPORTED_LOCATION "${${_PYTHON_PREFIX}_RUNTIME_LIBRARY_RELEASE}")
endif()
else()
# No separate runtime library: the found library itself is the location.
if (${_PYTHON_PREFIX}_LIBRARY_RELEASE AND ${_PYTHON_PREFIX}_LIBRARY_DEBUG)
set_property (TARGET ${__name} PROPERTY IMPORTED_CONFIGURATIONS RELEASE DEBUG)
set_target_properties (${__name}
PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
IMPORTED_LOCATION_RELEASE "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}")
set_target_properties (${__name}
PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
IMPORTED_LOCATION_DEBUG "${${_PYTHON_PREFIX}_LIBRARY_DEBUG}")
else()
set_target_properties (${__name}
PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES "C"
IMPORTED_LOCATION "${${_PYTHON_PREFIX}_LIBRARY_RELEASE}")
endif()
endif()
if (_${_PYTHON_PREFIX}_LIBRARY_TYPE STREQUAL "STATIC")
# extend link information with dependent libraries
_python_get_config_var (_${_PYTHON_PREFIX}_LINK_LIBRARIES LIBS)
if (_${_PYTHON_PREFIX}_LINK_LIBRARIES)
set_property (TARGET ${__name}
PROPERTY INTERFACE_LINK_LIBRARIES ${_${_PYTHON_PREFIX}_LINK_LIBRARIES})
endif()
endif()
# Propagate link options (e.g. the macOS framework rpath computed above)
# only for shared libraries.
if (${_PYTHON_PREFIX}_LINK_OPTIONS
AND _${_PYTHON_PREFIX}_LIBRARY_TYPE STREQUAL "SHARED")
set_property (TARGET ${__name} PROPERTY INTERFACE_LINK_OPTIONS "${${_PYTHON_PREFIX}_LINK_OPTIONS}")
endif()
endmacro()
if (${_PYTHON_PREFIX}_Development.Embed_FOUND)
__python_import_library (${_PYTHON_PREFIX}::Python)
endif()
if (${_PYTHON_PREFIX}_Development.Module_FOUND)
if ("LIBRARY" IN_LIST _${_PYTHON_PREFIX}_FIND_DEVELOPMENT_MODULE_ARTIFACTS)
# On Windows/CYGWIN/MSYS, Python::Module is the same as Python::Python
# but ALIAS cannot be used because the imported library is not GLOBAL.
__python_import_library (${_PYTHON_PREFIX}::Module)
else()
if (NOT TARGET ${_PYTHON_PREFIX}::Module)
add_library (${_PYTHON_PREFIX}::Module INTERFACE IMPORTED)
endif()
set_property (TARGET ${_PYTHON_PREFIX}::Module
PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${_PYTHON_PREFIX}_INCLUDE_DIRS}")
# When available, enforce shared library generation with undefined symbols
if (APPLE)
set_property (TARGET ${_PYTHON_PREFIX}::Module
PROPERTY INTERFACE_LINK_OPTIONS "LINKER:-undefined,dynamic_lookup")
endif()
if (CMAKE_SYSTEM_NAME STREQUAL "SunOS")
set_property (TARGET ${_PYTHON_PREFIX}::Module
PROPERTY INTERFACE_LINK_OPTIONS "LINKER:-z,nodefs")
endif()
if (CMAKE_SYSTEM_NAME STREQUAL "AIX")
set_property (TARGET ${_PYTHON_PREFIX}::Module
PROPERTY INTERFACE_LINK_OPTIONS "LINKER:-b,erok")
endif()
endif()
endif()
#
# PYTHON_ADD_LIBRARY (<name> [STATIC|SHARED|MODULE] src1 src2 ... srcN)
# It is used to build modules for python.
#
# Implements <prefix>_add_library(): an add_library() wrapper that links the
# new target against <prefix>::Module (MODULE type) or <prefix>::Python
# (STATIC/SHARED types) and applies Python extension-module naming rules
# (no "lib" prefix, ".pyd" on Windows, optional SOABI tag).
#   prefix - "Python", "Python2" or "Python3": selects the imported targets
#   name   - name of the target to create
#   ARGN   - optional STATIC|SHARED|MODULE and WITH_SOABI, then the sources
function (__${_PYTHON_PREFIX}_ADD_LIBRARY prefix name)
# Separate the type/WITH_SOABI keywords from the source list.
cmake_parse_arguments (PARSE_ARGV 2 PYTHON_ADD_LIBRARY "STATIC;SHARED;MODULE;WITH_SOABI" "" "")
if (PYTHON_ADD_LIBRARY_STATIC)
set (type STATIC)
elseif (PYTHON_ADD_LIBRARY_SHARED)
set (type SHARED)
else()
# MODULE is the default when no explicit type is requested.
set (type MODULE)
endif()
# Fail early if the imported target needed by the chosen library type was
# not created (i.e. the matching Development.* component was not found).
if (type STREQUAL "MODULE" AND NOT TARGET ${prefix}::Module)
message (SEND_ERROR "${prefix}_ADD_LIBRARY: dependent target '${prefix}::Module' is not defined.\n Did you miss to request COMPONENT 'Development.Module'?")
return()
endif()
if (NOT type STREQUAL "MODULE" AND NOT TARGET ${prefix}::Python)
message (SEND_ERROR "${prefix}_ADD_LIBRARY: dependent target '${prefix}::Python' is not defined.\n Did you miss to request COMPONENT 'Development.Embed'?")
return()
endif()
add_library (${name} ${type} ${PYTHON_ADD_LIBRARY_UNPARSED_ARGUMENTS})
# Re-read the effective type from the created target.
get_property (type TARGET ${name} PROPERTY TYPE)
if (type STREQUAL "MODULE_LIBRARY")
target_link_libraries (${name} PRIVATE ${prefix}::Module)
# customize library name to follow module name rules
set_property (TARGET ${name} PROPERTY PREFIX "")
if(CMAKE_SYSTEM_NAME STREQUAL "Windows")
set_property (TARGET ${name} PROPERTY SUFFIX ".pyd")
endif()
# Optionally insert the SOABI tag before the platform module suffix.
if (PYTHON_ADD_LIBRARY_WITH_SOABI AND ${prefix}_SOABI)
get_property (suffix TARGET ${name} PROPERTY SUFFIX)
if (NOT suffix)
set (suffix "${CMAKE_SHARED_MODULE_SUFFIX}")
endif()
set_property (TARGET ${name} PROPERTY SUFFIX ".${${prefix}_SOABI}${suffix}")
endif()
else()
# WITH_SOABI is meaningless for non-MODULE libraries: warn and ignore.
if (PYTHON_ADD_LIBRARY_WITH_SOABI)
message (AUTHOR_WARNING "Find${prefix}: Option `WITH_SOABI` is only supported for `MODULE` library type.")
endif()
target_link_libraries (${name} PRIVATE ${prefix}::Python)
endif()
endfunction()
endif()
# ${_PYTHON_PREFIX}::NumPy — header-only INTERFACE target that also pulls in
# ::Module; created only when the NumPy component was requested and found.
if ("NumPy" IN_LIST ${_PYTHON_PREFIX}_FIND_COMPONENTS AND ${_PYTHON_PREFIX}_NumPy_FOUND
AND NOT TARGET ${_PYTHON_PREFIX}::NumPy AND TARGET ${_PYTHON_PREFIX}::Module)
add_library (${_PYTHON_PREFIX}::NumPy INTERFACE IMPORTED)
set_property (TARGET ${_PYTHON_PREFIX}::NumPy
PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${_PYTHON_PREFIX}_NumPy_INCLUDE_DIRS}")
target_link_libraries (${_PYTHON_PREFIX}::NumPy INTERFACE ${_PYTHON_PREFIX}::Module)
endif()
endif()
# final clean-up
# Restore CMAKE_FIND_APPBUNDLE to the value saved at module entry.
if (DEFINED _${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE)
set (CMAKE_FIND_APPBUNDLE ${_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE})
unset (_${_PYTHON_PREFIX}_CMAKE_FIND_APPBUNDLE)
else()
# The variable was not set on entry: leave it unset.
unset (CMAKE_FIND_APPBUNDLE)
endif()
# Restore CMAKE_FIND_FRAMEWORK to the value saved at module entry.
if (DEFINED _${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK)
set (CMAKE_FIND_FRAMEWORK ${_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK})
unset (_${_PYTHON_PREFIX}_CMAKE_FIND_FRAMEWORK)
else()
unset (CMAKE_FIND_FRAMEWORK)
endif()
# Matches the cmake_policy(PUSH) done at the top of the module.
cmake_policy(POP)
================================================
FILE: scenario/cmake/FindPython3.cmake
================================================
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
# file Copyright.txt or https://cmake.org/licensing for details.
#[=======================================================================[.rst:
FindPython3
-----------
.. versionadded:: 3.12
Find Python 3 interpreter, compiler and development environment (include
directories and libraries).
.. versionadded:: 3.19
When a version is requested, it can be specified as a simple value or as a
range. For a detailed description of version range usage and capabilities,
refer to the :command:`find_package` command.
The following components are supported:
* ``Interpreter``: search for Python 3 interpreter
* ``Compiler``: search for Python 3 compiler. Only offered by IronPython.
* ``Development``: search for development artifacts (include directories and
libraries).
.. versionadded:: 3.18
This component includes two sub-components which can be specified
independently:
* ``Development.Module``: search for artifacts for Python 3 module
developments.
* ``Development.Embed``: search for artifacts for Python 3 embedding
developments.
* ``NumPy``: search for NumPy include directories.
.. versionadded:: 3.14
Added the ``NumPy`` component.
If no ``COMPONENTS`` are specified, ``Interpreter`` is assumed.
If component ``Development`` is specified, it implies sub-components
``Development.Module`` and ``Development.Embed``.
To ensure consistent versions between components ``Interpreter``, ``Compiler``,
``Development`` (or one of its sub-components) and ``NumPy``, specify all
components at the same time::
find_package (Python3 COMPONENTS Interpreter Development)
This module looks only for version 3 of Python. This module can be used
concurrently with :module:`FindPython2` module to use both Python versions.
The :module:`FindPython` module can be used if Python version does not matter
for you.
.. note::
If components ``Interpreter`` and ``Development`` (or one of its
sub-components) are both specified, this module searches only for an
interpreter with the same platform architecture as the one defined by ``CMake``
configuration. This constraint does not apply if only ``Interpreter``
component is specified.
Imported Targets
^^^^^^^^^^^^^^^^
This module defines the following :ref:`Imported Targets <Imported Targets>`:
.. versionchanged:: 3.14
:ref:`Imported Targets <Imported Targets>` are only created when
:prop_gbl:`CMAKE_ROLE` is ``PROJECT``.
``Python3::Interpreter``
Python 3 interpreter. Target defined if component ``Interpreter`` is found.
``Python3::Compiler``
Python 3 compiler. Target defined if component ``Compiler`` is found.
``Python3::Module``
.. versionadded:: 3.15
Python 3 library for Python module. Target defined if component
``Development.Module`` is found.
``Python3::Python``
Python 3 library for Python embedding. Target defined if component
``Development.Embed`` is found.
``Python3::NumPy``
.. versionadded:: 3.14
NumPy library for Python 3. Target defined if component ``NumPy`` is found.
Result Variables
^^^^^^^^^^^^^^^^
This module will set the following variables in your project
(see :ref:`Standard Variable Names <CMake Developer Standard Variable Names>`):
``Python3_FOUND``
System has the Python 3 requested components.
``Python3_Interpreter_FOUND``
System has the Python 3 interpreter.
``Python3_EXECUTABLE``
Path to the Python 3 interpreter.
``Python3_INTERPRETER_ID``
A short string unique to the interpreter. Possible values include:
* Python
* ActivePython
* Anaconda
* Canopy
* IronPython
* PyPy
``Python3_STDLIB``
Standard platform independent installation directory.
Information returned by
``distutils.sysconfig.get_python_lib(plat_specific=False,standard_lib=True)``
or else ``sysconfig.get_path('stdlib')``.
``Python3_STDARCH``
Standard platform dependent installation directory.
Information returned by
``distutils.sysconfig.get_python_lib(plat_specific=True,standard_lib=True)``
or else ``sysconfig.get_path('platstdlib')``.
``Python3_SITELIB``
Third-party platform independent installation directory.
Information returned by
``distutils.sysconfig.get_python_lib(plat_specific=False,standard_lib=False)``
or else ``sysconfig.get_path('purelib')``.
``Python3_SITEARCH``
Third-party platform dependent installation directory.
Information returned by
``distutils.sysconfig.get_python_lib(plat_specific=True,standard_lib=False)``
or else ``sysconfig.get_path('platlib')``.
``Python3_SOABI``
.. versionadded:: 3.17
Extension suffix for modules.
Information returned by
``distutils.sysconfig.get_config_var('SOABI')`` or computed from
``distutils.sysconfig.get_config_var('EXT_SUFFIX')`` or
``python3-config --extension-suffix``. If package ``distutils.sysconfig`` is
not available, ``sysconfig.get_config_var('SOABI')`` or
``sysconfig.get_config_var('EXT_SUFFIX')`` are used.
``Python3_Compiler_FOUND``
System has the Python 3 compiler.
``Python3_COMPILER``
Path to the Python 3 compiler. Only offered by IronPython.
``Python3_COMPILER_ID``
A short string unique to the compiler. Possible values include:
* IronPython
``Python3_DOTNET_LAUNCHER``
.. versionadded:: 3.18
The ``.Net`` interpreter. Only used by ``IronPython`` implementation.
``Python3_Development_FOUND``
System has the Python 3 development artifacts.
``Python3_Development.Module_FOUND``
.. versionadded:: 3.18
System has the Python 3 development artifacts for Python module.
``Python3_Development.Embed_FOUND``
.. versionadded:: 3.18
System has the Python 3 development artifacts for Python embedding.
``Python3_INCLUDE_DIRS``
The Python 3 include directories.
``Python3_LINK_OPTIONS``
.. versionadded:: 3.19
The Python 3 link options. Some configurations require specific link options
for a correct build and execution.
``Python3_LIBRARIES``
The Python 3 libraries.
``Python3_LIBRARY_DIRS``
The Python 3 library directories.
``Python3_RUNTIME_LIBRARY_DIRS``
The Python 3 runtime library directories.
``Python3_VERSION``
Python 3 version.
``Python3_VERSION_MAJOR``
Python 3 major version.
``Python3_VERSION_MINOR``
Python 3 minor version.
``Python3_VERSION_PATCH``
Python 3 patch version.
``Python3_PyPy_VERSION``
.. versionadded:: 3.18
Python 3 PyPy version.
``Python3_NumPy_FOUND``
.. versionadded:: 3.14
System has the NumPy.
``Python3_NumPy_INCLUDE_DIRS``
.. versionadded:: 3.14
The NumPy include directories.
``Python3_NumPy_VERSION``
.. versionadded:: 3.14
The NumPy version.
Hints
^^^^^
``Python3_ROOT_DIR``
Define the root directory of a Python 3 installation.
``Python3_USE_STATIC_LIBS``
* If not defined, search for shared libraries and static libraries in that
order.
* If set to TRUE, search **only** for static libraries.
* If set to FALSE, search **only** for shared libraries.
``Python3_FIND_ABI``
.. versionadded:: 3.16
This variable defines which ABIs, as defined in
`PEP 3149 <https://www.python.org/dev/peps/pep-3149/>`_, should be searched.
.. note::
If ``Python3_FIND_ABI`` is not defined, any ABI will be searched.
The ``Python3_FIND_ABI`` variable is a 3-tuple specifying, in that order,
``pydebug`` (``d``), ``pymalloc`` (``m``) and ``unicode`` (``u``) flags.
Each element can be set to one of the following:
* ``ON``: Corresponding flag is selected.
* ``OFF``: Corresponding flag is not selected.
* ``ANY``: The two possibilities (``ON`` and ``OFF``) will be searched.
From this 3-tuple, various ABIs will be searched starting from the most
specialized to the most general. Moreover, ``debug`` versions will be
searched **after** ``non-debug`` ones.
For example, if we have::
set (Python3_FIND_ABI "ON" "ANY" "ANY")
The following flags combinations will be appended, in that order, to the
artifact names: ``dmu``, ``dm``, ``du``, and ``d``.
And to search any possible ABIs::
set (Python3_FIND_ABI "ANY" "ANY" "ANY")
The following combinations, in that order, will be used: ``mu``, ``m``,
``u``, ````, ``dmu``, ``dm``, ``du`` and ``d``.
.. note::
This hint is useful only on ``POSIX`` systems. So, on ``Windows`` systems,
when ``Python3_FIND_ABI`` is defined, ``Python`` distributions from
`python.org <https://www.python.org>`_ will be found only if value for
each flag is ``OFF`` or ``ANY``.
``Python3_FIND_STRATEGY``
.. versionadded:: 3.15
This variable defines how lookup will be done.
The ``Python3_FIND_STRATEGY`` variable can be set to one of the following:
* ``VERSION``: Try to find the most recent version in all specified
locations.
This is the default if policy :policy:`CMP0094` is undefined or set to
``OLD``.
* ``LOCATION``: Stops lookup as soon as a version satisfying version
constraints is found.
This is the default if policy :policy:`CMP0094` is set to ``NEW``.
``Python3_FIND_REGISTRY``
.. versionadded:: 3.13
On Windows the ``Python3_FIND_REGISTRY`` variable determines the order
of preference between registry and environment variables.
The ``Python3_FIND_REGISTRY`` variable can be set to one of the following:
* ``FIRST``: Try to use registry before environment variables.
This is the default.
* ``LAST``: Try to use registry after environment variables.
* ``NEVER``: Never try to use registry.
``Python3_FIND_FRAMEWORK``
.. versionadded:: 3.15
On macOS the ``Python3_FIND_FRAMEWORK`` variable determines the order of
preference between Apple-style and unix-style package components.
This variable can take same values as :variable:`CMAKE_FIND_FRAMEWORK`
variable.
.. note::
Value ``ONLY`` is not supported so ``FIRST`` will be used instead.
If ``Python3_FIND_FRAMEWORK`` is not defined, :variable:`CMAKE_FIND_FRAMEWORK`
variable will be used, if any.
``Python3_FIND_VIRTUALENV``
.. versionadded:: 3.15
This variable defines the handling of virtual environments managed by
``virtualenv`` or ``conda``. It is meaningful only when a virtual environment
is active (i.e. the ``activate`` script has been evaluated). In this case, it
takes precedence over ``Python3_FIND_REGISTRY`` and ``CMAKE_FIND_FRAMEWORK``
variables. The ``Python3_FIND_VIRTUALENV`` variable can be set to one of the
following:
* ``FIRST``: The virtual environment is used before any other standard
paths to look-up for the interpreter. This is the default.
* ``ONLY``: Only the virtual environment is used to look-up for the
interpreter.
* ``STANDARD``: The virtual environment is not used to look-up for the
interpreter but environment variable ``PATH`` is always considered.
In this case, variable ``Python3_FIND_REGISTRY`` (Windows) or
``CMAKE_FIND_FRAMEWORK`` (macOS) can be set with value ``LAST`` or
``NEVER`` to select preferably the interpreter from the virtual
environment.
.. versionadded:: 3.17
Added support for ``conda`` environments.
.. note::
If the component ``Development`` is requested, it is **strongly**
recommended to also include the component ``Interpreter`` to get expected
result.
``Python3_FIND_IMPLEMENTATIONS``
.. versionadded:: 3.18
This variable defines, in an ordered list, the different implementations
which will be searched. The ``Python3_FIND_IMPLEMENTATIONS`` variable can
hold the following values:
* ``CPython``: this is the standard implementation. Various products, like
``Anaconda`` or ``ActivePython``, rely on this implementation.
* ``IronPython``: This implementation uses the ``CSharp`` language for
``.NET Framework`` on top of the `Dynamic Language Runtime` (``DLR``).
See `IronPython <https://ironpython.net>`_.
* ``PyPy``: This implementation uses the ``RPython`` language and the
``RPython translation toolchain`` to produce the python interpreter.
See `PyPy <https://www.pypy.org>`_.
The default value is:
* Windows platform: ``CPython``, ``IronPython``
* Other platforms: ``CPython``
.. note::
This hint has the lowest priority of all hints, so even if, for example,
you specify ``IronPython`` first and ``CPython`` in second, a python
product based on ``CPython`` can be selected because, for example with
``Python3_FIND_STRATEGY=LOCATION``, each location will be searched first for
``IronPython`` and second for ``CPython``.
.. note::
When ``IronPython`` is specified, on platforms other than ``Windows``, the
``.Net`` interpreter (i.e. ``mono`` command) is expected to be available
through the ``PATH`` variable.
``Python3_FIND_UNVERSIONED_NAMES``
.. versionadded:: 3.20
This variable defines how the generic names will be searched. Currently, it
only applies to the generic names of the interpreter, namely, ``python3`` and
``python``.
The ``Python3_FIND_UNVERSIONED_NAMES`` variable can be set to one of the
following values:
* ``FIRST``: The generic names are searched before the more specialized ones
(such as ``python3.5`` for example).
* ``LAST``: The generic names are searched after the more specialized ones.
This is the default.
* ``NEVER``: The generic names are not searched at all.
Artifacts Specification
^^^^^^^^^^^^^^^^^^^^^^^
.. versionadded:: 3.16
To solve special cases, it is possible to specify directly the artifacts by
setting the following variables:
``Python3_EXECUTABLE``
The path to the interpreter.
``Python3_COMPILER``
The path to the compiler.
``Python3_DOTNET_LAUNCHER``
.. versionadded:: 3.18
The ``.Net`` interpreter. Only used by ``IronPython`` implementation.
``Python3_LIBRARY``
The path to the library. It will be used to compute the
variables ``Python3_LIBRARIES``, ``Python3_LIBRARY_DIRS`` and
``Python3_RUNTIME_LIBRARY_DIRS``.
``Python3_INCLUDE_DIR``
The path to the directory of the ``Python`` headers. It will be used to
compute the variable ``Python3_INCLUDE_DIRS``.
``Python3_NumPy_INCLUDE_DIR``
The path to the directory of the ``NumPy`` headers. It will be used to
compute the variable ``Python3_NumPy_INCLUDE_DIRS``.
.. note::
All paths must be absolute. Any artifact specified with a relative path
will be ignored.
.. note::
When an artifact is specified, all ``HINTS`` will be ignored and no search
will be performed for this artifact.
If more than one artifact is specified, it is the user's responsibility to
ensure the consistency of the various artifacts.
By default, this module supports multiple calls in different directories of a
project with different version/component requirements while providing correct
and consistent results for each call. To support this behavior, ``CMake`` cache
is not used in the traditional way which can be problematic for interactive
specification. So, to enable also interactive specification, module behavior
can be controlled with the following variable:
``Python3_ARTIFACTS_INTERACTIVE``
.. versionadded:: 3.18
Selects the behavior of the module. This is a boolean variable:
* If set to ``TRUE``: Create CMake cache entries for the above artifact
specification variables so that users can edit them interactively.
This disables support for multiple version/component requirements.
* If set to ``FALSE`` or undefined: Enable multiple version/component
requirements.
Commands
^^^^^^^^
This module defines the command ``Python3_add_library`` (when
:prop_gbl:`CMAKE_ROLE` is ``PROJECT``), which has the same semantics as
:command:`add_library` and adds a dependency to target ``Python3::Python`` or,
when library type is ``MODULE``, to target ``Python3::Module`` and takes care
of Python module naming rules::
Python3_add_library (<name> [STATIC | SHARED | MODULE [WITH_SOABI]]
[<source> ...])
If the library type is not specified, ``MODULE`` is assumed.
.. versionadded:: 3.17
For ``MODULE`` library type, if option ``WITH_SOABI`` is specified, the
module suffix will include the ``Python3_SOABI`` value, if any.
#]=======================================================================]
# Configure the shared FindPython support code for Python 3 and run it.
set (_PYTHON_PREFIX Python3)
set (_Python3_REQUIRED_VERSION_MAJOR 3)
include (${CMAKE_CURRENT_LIST_DIR}/FindPython/Support.cmake)
# Expose the public command only when Support.cmake defined the helper
# (it does so only when CMAKE_ROLE is PROJECT).
if (COMMAND __Python3_add_library)
macro (Python3_add_library)
__Python3_add_library (Python3 ${ARGV})
endmacro()
endif()
unset (_PYTHON_PREFIX)
================================================
FILE: scenario/cmake/FindSphinx.cmake
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# Look for an executable called sphinx-build
# On success, caches the path in SPHINX_EXECUTABLE and sets Sphinx_FOUND.
find_program(SPHINX_EXECUTABLE
NAMES sphinx-build
DOC "Path to sphinx-build executable")
include(FindPackageHandleStandardArgs)
# Handle standard arguments to find_package like REQUIRED and QUIET
# (legacy FPHSA signature: the second argument is the failure message).
find_package_handle_standard_args(Sphinx
"Failed to find sphinx-build executable"
SPHINX_EXECUTABLE)
================================================
FILE: scenario/cmake/FindSphinxApidoc.cmake
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# Look for an executable called sphinx-apidoc
# On success, caches the path in SPHINX_APIDOC_EXECUTABLE and sets
# SphinxApidoc_FOUND.
find_program(SPHINX_APIDOC_EXECUTABLE
NAMES sphinx-apidoc
DOC "Path to sphinx-apidoc executable")
include(FindPackageHandleStandardArgs)
# Handle standard arguments to find_package like REQUIRED and QUIET
# (legacy FPHSA signature: the second argument is the failure message).
find_package_handle_standard_args(SphinxApidoc
"Failed to find sphinx-apidoc executable"
SPHINX_APIDOC_EXECUTABLE)
================================================
FILE: scenario/cmake/FindSphinxMultiVersion.cmake
================================================
# Copyright (C) 2020 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
# Look for an executable called sphinx-multiversion
# On success, caches the path in SPHINX_MULTIVERSION_EXECUTABLE and sets
# SphinxMultiVersion_FOUND.
find_program(SPHINX_MULTIVERSION_EXECUTABLE
NAMES sphinx-multiversion
DOC "Path to sphinx-multiversion executable")
include(FindPackageHandleStandardArgs)
# Handle standard arguments to find_package like REQUIRED and QUIET
# (legacy FPHSA signature: the second argument is the failure message).
find_package_handle_standard_args(SphinxMultiVersion
"Failed to find sphinx-multiversion executable"
SPHINX_MULTIVERSION_EXECUTABLE)
================================================
FILE: scenario/cmake/ImportTargetsCitadel.cmake
================================================
# Find the Citadel-versioned Ignition/sdformat packages and alias their
# versioned imported targets (e.g. sdformat9::sdformat9) to unversioned
# names (e.g. sdformat::sdformat) so the rest of the build is
# distribution-agnostic.
include(AliasImportedTarget)
# https://ignitionrobotics.org/docs/citadel/install#citadel-libraries
# sdformat9::sdformat9 -> sdformat::sdformat
alias_imported_target(
PACKAGE_ORIG sdformat9
PACKAGE_DEST sdformat
TARGETS_ORIG sdformat9
TARGETS_DEST sdformat
NAMESPACE_ORIG sdformat9
NAMESPACE_DEST sdformat
REQUIRED TRUE
)
# ignition-gazebo3::{ignition-gazebo3,core} -> ignition-gazebo::{...}
alias_imported_target(
PACKAGE_ORIG ignition-gazebo3
PACKAGE_DEST ignition-gazebo
TARGETS_ORIG ignition-gazebo3 core
TARGETS_DEST ignition-gazebo core
NAMESPACE_ORIG ignition-gazebo3
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-common3::ignition-common3 -> ignition-common::ignition-common
alias_imported_target(
PACKAGE_ORIG ignition-common3
PACKAGE_DEST ignition-common
TARGETS_ORIG ignition-common3
TARGETS_DEST ignition-common
NAMESPACE_ORIG ignition-common3
NAMESPACE_DEST ignition-common
REQUIRED TRUE
)
# ignition-sensors3::ignition-sensors3-all -> ignition-sensors::...
alias_imported_target(
PACKAGE_ORIG ignition-sensors3-all
PACKAGE_DEST ignition-sensors-all
TARGETS_ORIG ignition-sensors3-all
TARGETS_DEST ignition-sensors-all
NAMESPACE_ORIG ignition-sensors3
NAMESPACE_DEST ignition-sensors
REQUIRED TRUE
)
# ignition-rendering3::ignition-rendering3 -> ignition-rendering::...
alias_imported_target(
PACKAGE_ORIG ignition-rendering3
PACKAGE_DEST ignition-rendering
TARGETS_ORIG ignition-rendering3
TARGETS_DEST ignition-rendering
NAMESPACE_ORIG ignition-rendering3
NAMESPACE_DEST ignition-rendering
REQUIRED TRUE
)
# ignition-gazebo3::ignition-gazebo3-rendering -> ignition-gazebo::...
alias_imported_target(
PACKAGE_ORIG ignition-gazebo3-rendering
PACKAGE_DEST ignition-gazebo-rendering
TARGETS_ORIG ignition-gazebo3-rendering
TARGETS_DEST ignition-gazebo-rendering
NAMESPACE_ORIG ignition-gazebo3
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-physics2::ignition-physics2 -> ignition-physics::...
alias_imported_target(
PACKAGE_ORIG ignition-physics2
PACKAGE_DEST ignition-physics
TARGETS_ORIG ignition-physics2
TARGETS_DEST ignition-physics
NAMESPACE_ORIG ignition-physics2
NAMESPACE_DEST ignition-physics
REQUIRED TRUE
)
================================================
FILE: scenario/cmake/ImportTargetsDome.cmake
================================================
# Find the Dome-versioned Ignition/sdformat packages and alias their
# versioned imported targets to unversioned names so the rest of the
# build is distribution-agnostic.
include(AliasImportedTarget)
# https://ignitionrobotics.org/docs/dome/install#dome-libraries
# sdformat10::sdformat10 -> sdformat::sdformat
alias_imported_target(
PACKAGE_ORIG sdformat10
PACKAGE_DEST sdformat
TARGETS_ORIG sdformat10
TARGETS_DEST sdformat
NAMESPACE_ORIG sdformat10
NAMESPACE_DEST sdformat
REQUIRED TRUE
)
# ignition-gazebo4::{ignition-gazebo4,core} -> ignition-gazebo::{...}
alias_imported_target(
PACKAGE_ORIG ignition-gazebo4
PACKAGE_DEST ignition-gazebo
TARGETS_ORIG ignition-gazebo4 core
TARGETS_DEST ignition-gazebo core
NAMESPACE_ORIG ignition-gazebo4
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-common3::ignition-common3 -> ignition-common::ignition-common
alias_imported_target(
PACKAGE_ORIG ignition-common3
PACKAGE_DEST ignition-common
TARGETS_ORIG ignition-common3
TARGETS_DEST ignition-common
NAMESPACE_ORIG ignition-common3
NAMESPACE_DEST ignition-common
REQUIRED TRUE
)
# ignition-sensors4::ignition-sensors4-all -> ignition-sensors::...
alias_imported_target(
PACKAGE_ORIG ignition-sensors4-all
PACKAGE_DEST ignition-sensors-all
TARGETS_ORIG ignition-sensors4-all
TARGETS_DEST ignition-sensors-all
NAMESPACE_ORIG ignition-sensors4
NAMESPACE_DEST ignition-sensors
REQUIRED TRUE
)
# ignition-rendering4::ignition-rendering4 -> ignition-rendering::...
alias_imported_target(
PACKAGE_ORIG ignition-rendering4
PACKAGE_DEST ignition-rendering
TARGETS_ORIG ignition-rendering4
TARGETS_DEST ignition-rendering
NAMESPACE_ORIG ignition-rendering4
NAMESPACE_DEST ignition-rendering
REQUIRED TRUE
)
# ignition-gazebo4::ignition-gazebo4-rendering -> ignition-gazebo::...
alias_imported_target(
PACKAGE_ORIG ignition-gazebo4-rendering
PACKAGE_DEST ignition-gazebo-rendering
TARGETS_ORIG ignition-gazebo4-rendering
TARGETS_DEST ignition-gazebo-rendering
NAMESPACE_ORIG ignition-gazebo4
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-physics3::ignition-physics3 -> ignition-physics::...
alias_imported_target(
PACKAGE_ORIG ignition-physics3
PACKAGE_DEST ignition-physics
TARGETS_ORIG ignition-physics3
TARGETS_DEST ignition-physics
NAMESPACE_ORIG ignition-physics3
NAMESPACE_DEST ignition-physics
REQUIRED TRUE
)
# ignition-fuel_tools5::ignition-fuel_tools5 -> ignition-fuel_tools::...
alias_imported_target(
PACKAGE_ORIG ignition-fuel_tools5
PACKAGE_DEST ignition-fuel_tools
TARGETS_ORIG ignition-fuel_tools5
TARGETS_DEST ignition-fuel_tools
NAMESPACE_ORIG ignition-fuel_tools5
NAMESPACE_DEST ignition-fuel_tools
REQUIRED TRUE
)
================================================
FILE: scenario/cmake/ImportTargetsEdifice.cmake
================================================
# Find the Edifice-versioned Ignition/sdformat packages and alias their
# versioned imported targets to unversioned names so the rest of the
# build is distribution-agnostic.
include(AliasImportedTarget)
# https://ignitionrobotics.org/docs/edifice/install#edifice-libraries
# sdformat11::sdformat11 -> sdformat::sdformat
alias_imported_target(
PACKAGE_ORIG sdformat11
PACKAGE_DEST sdformat
TARGETS_ORIG sdformat11
TARGETS_DEST sdformat
NAMESPACE_ORIG sdformat11
NAMESPACE_DEST sdformat
REQUIRED TRUE
)
# ignition-gazebo5::{ignition-gazebo5,core} -> ignition-gazebo::{...}
alias_imported_target(
PACKAGE_ORIG ignition-gazebo5
PACKAGE_DEST ignition-gazebo
TARGETS_ORIG ignition-gazebo5 core
TARGETS_DEST ignition-gazebo core
NAMESPACE_ORIG ignition-gazebo5
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-common4::ignition-common4 -> ignition-common::ignition-common
alias_imported_target(
PACKAGE_ORIG ignition-common4
PACKAGE_DEST ignition-common
TARGETS_ORIG ignition-common4
TARGETS_DEST ignition-common
NAMESPACE_ORIG ignition-common4
NAMESPACE_DEST ignition-common
REQUIRED TRUE
)
# ignition-sensors5::ignition-sensors5-all -> ignition-sensors::...
alias_imported_target(
PACKAGE_ORIG ignition-sensors5-all
PACKAGE_DEST ignition-sensors-all
TARGETS_ORIG ignition-sensors5-all
TARGETS_DEST ignition-sensors-all
NAMESPACE_ORIG ignition-sensors5
NAMESPACE_DEST ignition-sensors
REQUIRED TRUE
)
# ignition-rendering5::ignition-rendering5 -> ignition-rendering::...
alias_imported_target(
PACKAGE_ORIG ignition-rendering5
PACKAGE_DEST ignition-rendering
TARGETS_ORIG ignition-rendering5
TARGETS_DEST ignition-rendering
NAMESPACE_ORIG ignition-rendering5
NAMESPACE_DEST ignition-rendering
REQUIRED TRUE
)
# ignition-gazebo5::ignition-gazebo5-rendering -> ignition-gazebo::...
alias_imported_target(
PACKAGE_ORIG ignition-gazebo5-rendering
PACKAGE_DEST ignition-gazebo-rendering
TARGETS_ORIG ignition-gazebo5-rendering
TARGETS_DEST ignition-gazebo-rendering
NAMESPACE_ORIG ignition-gazebo5
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-physics4::ignition-physics4 -> ignition-physics::...
alias_imported_target(
PACKAGE_ORIG ignition-physics4
PACKAGE_DEST ignition-physics
TARGETS_ORIG ignition-physics4
TARGETS_DEST ignition-physics
NAMESPACE_ORIG ignition-physics4
NAMESPACE_DEST ignition-physics
REQUIRED TRUE
)
================================================
FILE: scenario/cmake/ImportTargetsFortress.cmake
================================================
# Find the Fortress-versioned Ignition/sdformat packages and alias their
# versioned imported targets to unversioned names so the rest of the
# build is distribution-agnostic.
include(AliasImportedTarget)
# https://ignitionrobotics.org/docs/fortress/install#fortress-libraries
# sdformat12::sdformat12 -> sdformat::sdformat
alias_imported_target(
PACKAGE_ORIG sdformat12
PACKAGE_DEST sdformat
TARGETS_ORIG sdformat12
TARGETS_DEST sdformat
NAMESPACE_ORIG sdformat12
NAMESPACE_DEST sdformat
REQUIRED TRUE
)
# ignition-gazebo6::{ignition-gazebo6,core} -> ignition-gazebo::{...}
alias_imported_target(
PACKAGE_ORIG ignition-gazebo6
PACKAGE_DEST ignition-gazebo
TARGETS_ORIG ignition-gazebo6 core
TARGETS_DEST ignition-gazebo core
NAMESPACE_ORIG ignition-gazebo6
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-common4::ignition-common4 -> ignition-common::ignition-common
alias_imported_target(
PACKAGE_ORIG ignition-common4
PACKAGE_DEST ignition-common
TARGETS_ORIG ignition-common4
TARGETS_DEST ignition-common
NAMESPACE_ORIG ignition-common4
NAMESPACE_DEST ignition-common
REQUIRED TRUE
)
# ignition-sensors6::ignition-sensors6-all -> ignition-sensors::...
alias_imported_target(
PACKAGE_ORIG ignition-sensors6-all
PACKAGE_DEST ignition-sensors-all
TARGETS_ORIG ignition-sensors6-all
TARGETS_DEST ignition-sensors-all
NAMESPACE_ORIG ignition-sensors6
NAMESPACE_DEST ignition-sensors
REQUIRED TRUE
)
# ignition-rendering6::ignition-rendering6 -> ignition-rendering::...
alias_imported_target(
PACKAGE_ORIG ignition-rendering6
PACKAGE_DEST ignition-rendering
TARGETS_ORIG ignition-rendering6
TARGETS_DEST ignition-rendering
NAMESPACE_ORIG ignition-rendering6
NAMESPACE_DEST ignition-rendering
REQUIRED TRUE
)
# ignition-gazebo6::ignition-gazebo6-rendering -> ignition-gazebo::...
alias_imported_target(
PACKAGE_ORIG ignition-gazebo6-rendering
PACKAGE_DEST ignition-gazebo-rendering
TARGETS_ORIG ignition-gazebo6-rendering
TARGETS_DEST ignition-gazebo-rendering
NAMESPACE_ORIG ignition-gazebo6
NAMESPACE_DEST ignition-gazebo
REQUIRED TRUE
)
# ignition-physics5::ignition-physics5 -> ignition-physics::...
alias_imported_target(
PACKAGE_ORIG ignition-physics5
PACKAGE_DEST ignition-physics
TARGETS_ORIG ignition-physics5
TARGETS_DEST ignition-physics
NAMESPACE_ORIG ignition-physics5
NAMESPACE_DEST ignition-physics
REQUIRED TRUE
)
================================================
FILE: scenario/deps/CMakeLists.txt
================================================
# Copyright (C) 2019 Istituto Italiano di Tecnologia (IIT). All rights reserved.
# This software may be modified and distributed under the terms of the
# GNU Lesser General Public License v2.1 or any later version.
include(FetchContent)
# ===
# YCM
# ===
# Use a system YCM when available (default follows YCM_FOUND), otherwise
# fetch a private copy and expose its CMake modules to the parent scope.
find_package(YCM QUIET)
option(SCENARIO_USE_SYSTEM_YCM
"Use system-installed YCM, rather than a private copy"
${YCM_FOUND})
if(SCENARIO_USE_SYSTEM_YCM AND NOT ${YCM_FOUND})
message(FATAL_ERROR "Failed to find system YCM")
endif()
if(NOT ${SCENARIO_USE_SYSTEM_YCM})
# NOTE(review): no GIT_TAG is given, so this tracks the repository's
# default branch — confirm whether a pinned tag is intended.
FetchContent_Declare(
ycm
GIT_REPOSITORY https://github.com/robotology/ycm.git)
FetchContent_GetProperties(ycm)
if(NOT ycm_POPULATED)
FetchContent_Populate(ycm)
# EXCLUDE_FROM_ALL: build YCM pieces only when actually needed.
add_subdirectory(${ycm_SOURCE_DIR}
${ycm_BINARY_DIR}
EXCLUDE_FROM_ALL)
endif()
set(CMAKE_MODULE_PATH
"${CMAKE_MODULE_PATH};${ycm_SOURCE_DIR}/modules" PARENT_SCOPE)
else()
set(CMAKE_MODULE_PATH
"${CMAKE_MODULE_PATH};${YCM_MODULE_PATH}" PARENT_SCOPE)
endif()
# ====================
# TINY-PROCESS-LIBRARY
# ====================
# Use a system tiny-process-library when available, otherwise fetch it and
# compile it as an OBJECT library embedded in the consumers.
find_package(tiny-process-library QUIET)
option(SCENARIO_USE_SYSTEM_TPL
"Use system-installed tiny-process-library, rather than a private copy"
${tiny-process-library_FOUND})
if(SCENARIO_USE_SYSTEM_TPL AND NOT ${tiny-process-library_FOUND})
message(FATAL_ERROR "Failed to find system tiny-process-library")
endif()
if(NOT ${SCENARIO_USE_SYSTEM_TPL})
FetchContent_Declare(
tinyprocesslibrary
GIT_REPOSITORY https://gitlab.com/eidheim/tiny-process-library.git)
FetchContent_GetProperties(tinyprocesslibrary)
if(NOT tinyprocesslibrary_POPULATED)
FetchContent_Populate(tinyprocesslibrary)
# We don't want to install this library in the system, we instead
# compile it as an OBJECT library and embed in either the shared or
# static libraries that need it.
if(WIN32)
add_library(tiny-process-library OBJECT
${tinyprocesslibrary_SOURCE_DIR}/process.cpp
${tinyprocesslibrary_SOURCE_DIR}/process_win.cpp)
# If compiled using MSYS2, use sh to run commands
if(MSYS)
target_compile_definitions(tiny-process-library
PUBLIC MSYS_PROCESS_USE_SH)
endif()
else()
add_library(tiny-process-library OBJECT
${tinyprocesslibrary_SOURCE_DIR}/process.cpp
${tinyprocesslibrary_SOURCE_DIR}/process_unix.cpp)
endif()
if(MSVC)
# target_compile_definitions() expects plain macro names: CMake only
# strips a leading "-D", so a "/D..." item would be forwarded verbatim
# as a (broken) definition. Pass the bare macro name instead.
target_compile_definitions(tiny-process-library
PRIVATE _CRT_SECURE_NO_WARNINGS)
endif()
find_package(Threads REQUIRED)
target_link_libraries(tiny-process-library PRIVATE
${CMAKE_THREAD_LIBS_INIT})
# Headers are only needed while building; nothing is installed/exported,
# hence the BUILD_INTERFACE-only include path.
target_include_directories(tiny-process-library PUBLIC
$<BUILD_INTERFACE:${tinyprocesslibrary_SOURCE_DIR}>)
add_library(tiny-process-library::tiny-process-library ALIAS tiny-process-library)
endif()
endif()
# =====
# CLARA
# =====
# Header-only command-line parser vendored under deps/clara.
add_library(Clara INTERFACE)
target_sources(Clara INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/clara/clara.hpp)
# Build-tree only include path: Clara is not installed/exported, so the
# path must be wrapped in a BUILD_INTERFACE generator expression.
target_include_directories(Clara INTERFACE
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
================================================
FILE: scenario/deps/clara/clara.hpp
================================================
// Copyright 2017 Two Blue Cubes Ltd. All rights reserved.
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// See https://github.com/philsquared/Clara for more details
// Clara v1.1.5
#ifndef CLARA_HPP_INCLUDED
#define CLARA_HPP_INCLUDED
#ifndef CLARA_CONFIG_CONSOLE_WIDTH
#define CLARA_CONFIG_CONSOLE_WIDTH 80
#endif
#ifndef CLARA_TEXTFLOW_CONFIG_CONSOLE_WIDTH
#define CLARA_TEXTFLOW_CONFIG_CONSOLE_WIDTH CLARA_CONFIG_CONSOLE_WIDTH
#endif
#ifndef CLARA_CONFIG_OPTIONAL_TYPE
#ifdef __has_include
#if __has_include() && __cplusplus >= 201703L
#include
#define CLARA_CONFIG_OPTIONAL_TYPE std::optional
#endif
#endif
#endif
// ----------- #included from clara_textflow.hpp -----------
// TextFlowCpp
//
// A single-header library for wrapping and laying out basic text, by Phil Nash
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// This project is hosted at https://github.com/philsquared/textflowcpp
#ifndef CLARA_TEXTFLOW_HPP_INCLUDED
#define CLARA_TEXTFLOW_HPP_INCLUDED

// Restored include names (the extraction stripped the <...> spans);
// reconstructed from upstream TextFlowCpp as bundled in Clara v1.1.5.
#include <cassert>
#include <ostream>
#include <sstream>
#include <vector>

#ifndef CLARA_TEXTFLOW_CONFIG_CONSOLE_WIDTH
#define CLARA_TEXTFLOW_CONFIG_CONSOLE_WIDTH 80
#endif
namespace clara { namespace TextFlow {
// True for the four ASCII whitespace characters TextFlow wraps on.
inline auto isWhitespace( char c ) -> bool {
    switch( c ) {
        case ' ': case '\t': case '\n': case '\r':
            return true;
        default:
            return false;
    }
}
// Characters a line may be broken immediately before (open brackets and '|').
inline auto isBreakableBefore( char c ) -> bool {
    switch( c ) {
        case '[': case '(': case '{': case '<': case '|':
            return true;
        default:
            return false;
    }
}
// Characters a line may be broken immediately after (closers, punctuation,
// and common operator characters).
inline auto isBreakableAfter( char c ) -> bool {
    switch( c ) {
        case ']': case ')': case '}': case '>':
        case '.': case ',': case ':': case ';':
        case '*': case '+': case '-': case '=':
        case '&': case '/': case '\\':
            return true;
        default:
            return false;
    }
}
class Columns;
// A block of text that is wrapped to a fixed width, with optional indents.
// Restored: the extraction stripped the template argument of m_strings;
// reconstructed from upstream Clara v1.1.5.
class Column {
    std::vector<std::string> m_strings;                   // source text blocks to wrap
    size_t m_width = CLARA_TEXTFLOW_CONFIG_CONSOLE_WIDTH; // max characters per line
    size_t m_indent = 0;                                  // indent for every line
    size_t m_initialIndent = std::string::npos;           // optional different first-line indent

public:
    // Forward iterator producing one wrapped (and indented) line per increment.
    class iterator {
        friend Column;

        Column const& m_column;
        size_t m_stringIndex = 0; // which entry in m_strings is being wrapped
        size_t m_pos = 0;         // start of current line within that string
        size_t m_len = 0;         // length of current line
        size_t m_end = 0;         // position of next '\n' (or end of string)
        bool m_suffix = false;    // line was hard-broken mid-word: emit trailing '-'

        // Private "end" constructor, used by Column::end().
        iterator( Column const& column, size_t stringIndex )
        :   m_column( column ),
            m_stringIndex( stringIndex )
        {}

        auto line() const -> std::string const& { return m_column.m_strings[m_stringIndex]; }

        // True if the text may be broken just before position `at`.
        auto isBoundary( size_t at ) const -> bool {
            assert( at > 0 );
            assert( at <= line().size() );

            return at == line().size() ||
                   ( isWhitespace( line()[at] ) && !isWhitespace( line()[at-1] ) ) ||
                   isBreakableBefore( line()[at] ) ||
                   isBreakableAfter( line()[at-1] );
        }

        // Compute m_end/m_len/m_suffix for the line starting at m_pos.
        void calcLength() {
            assert( m_stringIndex < m_column.m_strings.size() );

            m_suffix = false;
            auto width = m_column.m_width-indent();
            m_end = m_pos;
            while( m_end < line().size() && line()[m_end] != '\n' )
                ++m_end;

            if( m_end < m_pos + width ) {
                // The whole remainder (up to a hard newline) fits on one line.
                m_len = m_end - m_pos;
            }
            else {
                // Walk back to the nearest break point, then trim trailing spaces.
                size_t len = width;
                while (len > 0 && !isBoundary(m_pos + len))
                    --len;
                while (len > 0 && isWhitespace( line()[m_pos + len - 1] ))
                    --len;

                if (len > 0) {
                    m_len = len;
                } else {
                    // No boundary found: hard-break the word and hyphenate.
                    m_suffix = true;
                    m_len = width - 1;
                }
            }
        }

        // Indent of the current line (initial indent applies to the first line only).
        auto indent() const -> size_t {
            auto initial = m_pos == 0 && m_stringIndex == 0 ? m_column.m_initialIndent : std::string::npos;
            return initial == std::string::npos ? m_column.m_indent : initial;
        }

        auto addIndentAndSuffix(std::string const &plain) const -> std::string {
            return std::string( indent(), ' ' ) + (m_suffix ? plain + "-" : plain);
        }

    public:
        using difference_type = std::ptrdiff_t;
        using value_type = std::string;
        using pointer = value_type*;
        using reference = value_type&;
        using iterator_category = std::forward_iterator_tag;

        explicit iterator( Column const& column ) : m_column( column ) {
            assert( m_column.m_width > m_column.m_indent );
            assert( m_column.m_initialIndent == std::string::npos || m_column.m_width > m_column.m_initialIndent );
            calcLength();
            if( m_len == 0 )
                m_stringIndex++; // Empty string
        }

        auto operator *() const -> std::string {
            assert( m_stringIndex < m_column.m_strings.size() );
            assert( m_pos <= m_end );
            return addIndentAndSuffix(line().substr(m_pos, m_len));
        }

        auto operator ++() -> iterator& {
            m_pos += m_len;
            if( m_pos < line().size() && line()[m_pos] == '\n' )
                m_pos += 1;
            else
                while( m_pos < line().size() && isWhitespace( line()[m_pos] ) )
                    ++m_pos;

            if( m_pos == line().size() ) {
                m_pos = 0;
                ++m_stringIndex;
            }
            if( m_stringIndex < m_column.m_strings.size() )
                calcLength();
            return *this;
        }
        auto operator ++(int) -> iterator {
            iterator prev( *this );
            operator++();
            return prev;
        }

        auto operator ==( iterator const& other ) const -> bool {
            return
                m_pos == other.m_pos &&
                m_stringIndex == other.m_stringIndex &&
                &m_column == &other.m_column;
        }
        auto operator !=( iterator const& other ) const -> bool {
            return !operator==( other );
        }
    };
    using const_iterator = iterator;

    explicit Column( std::string const& text ) { m_strings.push_back( text ); }

    // Fluent setters (return *this for chaining).
    auto width( size_t newWidth ) -> Column& {
        assert( newWidth > 0 );
        m_width = newWidth;
        return *this;
    }
    auto indent( size_t newIndent ) -> Column& {
        m_indent = newIndent;
        return *this;
    }
    auto initialIndent( size_t newIndent ) -> Column& {
        m_initialIndent = newIndent;
        return *this;
    }

    auto width() const -> size_t { return m_width; }
    auto begin() const -> iterator { return iterator( *this ); }
    auto end() const -> iterator { return { *this, m_strings.size() }; }

    inline friend std::ostream& operator << ( std::ostream& os, Column const& col ) {
        bool first = true;
        for( auto line : col ) {
            if( first )
                first = false;
            else
                os << "\n";
            os << line;
        }
        return os;
    }

    // Defined after class Columns (see Column::operator+ below).
    auto operator + ( Column const& other ) -> Columns;

    auto toString() const -> std::string {
        std::ostringstream oss;
        oss << *this;
        return oss.str();
    }
};
// A Column containing only blank space, used as a horizontal gap between
// real columns.
class Spacer : public Column {
public:
    explicit Spacer( size_t gapWidth ) : Column( "" ) {
        width( gapWidth );
    }
};
class Columns {
std::vector m_columns;
public:
class iterator {
friend Columns;
struct EndTag {};
std::vector const& m_columns;
std::vector m_iterators;
size_t m_activeIterators;
iterator( Columns const& columns, EndTag )
: m_columns( columns.m_columns ),
m_activeIterators( 0 )
{
m_iterators.reserve( m_columns.size() );
for( auto const& col : m_columns )
m_iterators.push_back( col.end() );
}
public:
using difference_type = std::ptrdiff_t;
using value_type = std::string;
using pointer = value_type*;
using reference = value_type&;
using iterator_category = std::forward_iterator_tag;
explicit iterator( Columns const& columns )
: m_columns( columns.m_columns ),
m_activeIterators( m_columns.size() )
{
m_iterators.reserve( m_columns.size() );
for( auto const& col : m_columns )
m_iterators.push_back( col.begin() );
}
auto operator ==( iterator const& other ) const -> bool {
return m_iterators == other.m_iterators;
}
auto operator !=( iterator const& other ) const -> bool {
return m_iterators != other.m_iterators;
}
auto operator *() const -> std::string {
std::string row, padding;
for( size_t i = 0; i < m_columns.size(); ++i ) {
auto width = m_columns[i].width();
if( m_iterators[i] != m_columns[i].end() ) {
std::string col = *m_iterators[i];
row += padding + col;
if( col.size() < width )
padding = std::string( width - col.size(), ' ' );
else
padding = "";
}
else {
padding += std::string( width, ' ' );
}
}
return row;
}
auto operator ++() -> iterator& {
for( size_t i = 0; i < m_columns.size(); ++i ) {
if (m_iterators[i] != m_columns[i].end())
++m_iterators[i];
}
return *this;
}
auto operator ++(int) -> iterator {
iterator prev( *this );
operator++();
return prev;
}
};
using const_iterator = iterator;
auto begin() const -> iterator { return iterator( *this ); }
auto end() const -> iterator { return { *this, iterator::EndTag() }; }
auto operator += ( Column const& col ) -> Columns& {
m_columns.push_back( col );
return *this;
}
auto operator + ( Column const& col ) -> Columns {
Columns combined = *this;
combined += col;
return combined;
}
inline friend std::ostream& operator << ( std::ostream& os, Columns const& cols ) {
bool first = true;
for( auto line : cols ) {
if( first )
first = false;
else
os << "\n";
os << line;
}
return os;
}
auto toString() const -> std::string {
std::ostringstream oss;
oss << *this;
return oss.str();
}
};
// Join two columns side by side into a Columns aggregate.
inline auto Column::operator + ( Column const& other ) -> Columns {
    Columns joined;
    joined += *this;
    joined += other;
    return joined;
}
}}
#endif // CLARA_TEXTFLOW_HPP_INCLUDED
// ----------- end of #include from clara_textflow.hpp -----------
// ........... back in clara.hpp
// Restored include names (the extraction stripped the <...> spans);
// reconstructed from upstream Clara v1.1.5.
#include <memory>
#include <set>
#include <algorithm>

#if !defined(CLARA_PLATFORM_WINDOWS) && ( defined(WIN32) || defined(__WIN32__) || defined(_WIN32) || defined(_MSC_VER) )
#define CLARA_PLATFORM_WINDOWS
#endif
namespace clara {
namespace detail {
// Traits for extracting arg and return type of lambdas (for single argument lambdas).
// Restored: the extraction stripped every template parameter list and template
// argument in this header; reconstructed from upstream Clara v1.1.5.
template<typename L>
struct UnaryLambdaTraits : UnaryLambdaTraits<decltype( &L::operator() )> {};

// Fallback: call operators that are not unary are not usable as binders.
template<typename ClassT, typename ReturnT, typename... Args>
struct UnaryLambdaTraits<ReturnT( ClassT::* )( Args... ) const> {
    static const bool isValid = false;
};

// Unary lambda: exposes the decayed argument type and the return type.
template<typename ClassT, typename ReturnT, typename ArgT>
struct UnaryLambdaTraits<ReturnT( ClassT::* )( ArgT ) const> {
    static const bool isValid = true;
    using ArgType = typename std::remove_const<typename std::remove_reference<ArgT>::type>::type;
    using ReturnType = ReturnT;
};
class TokenStream;
// Transport for raw args (copied from main args, or supplied via init list for testing)
class Args {
    friend TokenStream;
    std::string m_exeName;
    std::vector<std::string> m_args; // restored template argument

public:
    // From main(): argv[0] is the exe name, the remainder are the args.
    Args( int argc, char const* const* argv )
    :   m_exeName(argv[0]),
        m_args(argv + 1, argv + argc) {}

    // From an initializer list (mainly for tests); the first element is the exe name.
    Args( std::initializer_list<char const*> args )
    :   m_exeName( *args.begin() ),
        m_args( args.begin()+1, args.end() )
    {}

    auto exeName() const -> std::string {
        return m_exeName;
    }
};
// Wraps a token coming from a token stream. These may not directly correspond to strings as a single string
// may encode an option + its argument if the : or = form is used
// Classification of a single command-line token.
enum class TokenType {
    Option, Argument
};
// A single parsed token: its classification plus its exact text.
struct Token {
    TokenType type;
    std::string token;
};
// True if `c` can begin an option token: '-' everywhere, plus '/' on Windows.
inline auto isOptPrefix( char c ) -> bool {
#ifdef CLARA_PLATFORM_WINDOWS
    if( c == '/' )
        return true;
#endif
    return c == '-';
}
// Abstracts iterators into args as a stream of tokens, with option arguments uniformly handled
class TokenStream {
using Iterator = std::vector::const_iterator;
Iterator it;
Iterator itEnd;
std::vector m_tokenBuffer;
void loadBuffer() {
m_tokenBuffer.resize( 0 );
// Skip any empty strings
while( it != itEnd && it->empty() )
++it;
if( it != itEnd ) {
auto const &next = *it;
if( isOptPrefix( next[0] ) ) {
auto delimiterPos = next.find_first_of( " :=" );
if( delimiterPos != std::string::npos ) {
m_tokenBuffer.push_back( { TokenType::Option, next.substr( 0, delimiterPos ) } );
m_tokenBuffer.push_back( { TokenType::Argument, next.substr( delimiterPos + 1 ) } );
} else {
if( next[1] != '-' && next.size() > 2 ) {
std::string opt = "- ";
for( size_t i = 1; i < next.size(); ++i ) {
opt[1] = next[i];
m_tokenBuffer.push_back( { TokenType::Option, opt } );
}
} else {
m_tokenBuffer.push_back( { TokenType::Option, next } );
}
}
} else {
m_tokenBuffer.push_back( { TokenType::Argument, next } );
}
}
}
public:
explicit TokenStream( Args const &args ) : TokenStream( args.m_args.begin(), args.m_args.end() ) {}
TokenStream( Iterator it, Iterator itEnd ) : it( it ), itEnd( itEnd ) {
loadBuffer();
}
explicit operator bool() const {
return !m_tokenBuffer.empty() || it != itEnd;
}
auto count() const -> size_t { return m_tokenBuffer.size() + (itEnd - it); }
auto operator*() const -> Token {
assert( !m_tokenBuffer.empty() );
return m_tokenBuffer.front();
}
auto operator->() const -> Token const * {
assert( !m_tokenBuffer.empty() );
return &m_tokenBuffer.front();
}
auto operator++() -> TokenStream & {
if( m_tokenBuffer.size() >= 2 ) {
m_tokenBuffer.erase( m_tokenBuffer.begin() );
} else {
if( it != itEnd )
++it;
loadBuffer();
}
return *this;
}
};
// Type-erased base for all result types: records only the outcome category.
class ResultBase {
public:
    enum Type {
        Ok, LogicError, RuntimeError
    };

protected:
    ResultBase( Type type ) : m_type( type ) {}
    virtual ~ResultBase() = default;

    // Implemented by derived classes; called before exposing a stored value.
    virtual void enforceOk() const = 0;

    Type m_type;
};
template
class ResultValueBase : public ResultBase {
public:
auto value() const -> T const & {
enforceOk();
return m_value;
}
protected:
ResultValueBase( Type type ) : ResultBase( type ) {}
ResultValueBase( ResultValueBase const &other ) : ResultBase( other ) {
if( m_type == ResultBase::Ok )
new( &m_value ) T( other.m_value );
}
ResultValueBase( Type, T const &value ) : ResultBase( Ok ) {
new( &m_value ) T( value );
}
auto operator=( ResultValueBase const &other ) -> ResultValueBase & {
if( m_type == ResultBase::Ok )
m_value.~T();
ResultBase::operator=(other);
if( m_type == ResultBase::Ok )
new( &m_value ) T( other.m_value );
return *this;
}
~ResultValueBase() override {
if( m_type == Ok )
m_value.~T();
}
union {
T m_value;
};
};
template<>
class ResultValueBase : public ResultBase {
protected:
using ResultBase::ResultBase;
};
template
class BasicResult : public ResultValueBase {
public:
template
explicit BasicResult( BasicResult const &other )
: ResultValueBase( other.type() ),
m_errorMessage( other.errorMessage() )
{
assert( type() != ResultBase::Ok );
}
template
static auto ok( U const &value ) -> BasicResult { return { ResultBase::Ok, value }; }
static auto ok() -> BasicResult { return { ResultBase::Ok }; }
static auto logicError( std::string const &message ) -> BasicResult { return { ResultBase::LogicError, message }; }
static auto runtimeError( std::string const &message ) -> BasicResult { return { ResultBase::RuntimeError, message }; }
explicit operator bool() const { return m_type == ResultBase::Ok; }
auto type() const -> ResultBase::Type { return m_type; }
auto errorMessage() const -> std::string { return m_errorMessage; }
protected:
void enforceOk() const override {
// Errors shouldn't reach this point, but if they do
// the actual error message will be in m_errorMessage
assert( m_type != ResultBase::LogicError );
assert( m_type != ResultBase::RuntimeError );
if( m_type != ResultBase::Ok )
std::abort();
}
std::string m_errorMessage; // Only populated if resultType is an error
BasicResult( ResultBase::Type type, std::string const &message )
: ResultValueBase(type),
m_errorMessage(message)
{
assert( m_type != ResultBase::Ok );
}
using ResultValueBase::ResultValueBase;
using ResultBase::m_type;
};
// Outcome of a single element's parse attempt. The ShortCircuit* values abort
// further parsing (used elsewhere in this header, e.g. for help handling --
// not visible in this chunk).
enum class ParseResultType {
    Matched, NoMatch, ShortCircuitAll, ShortCircuitSame
};
// Immutable pair of (parse outcome, remaining token stream) threaded through
// the parsers.
class ParseState {
public:
    ParseState( ParseResultType type, TokenStream const &remainingTokens )
    :   m_type(type),
        m_remainingTokens( remainingTokens )
    {}

    auto type() const -> ParseResultType { return m_type; }
    auto remainingTokens() const -> TokenStream { return m_remainingTokens; }

private:
    ParseResultType m_type;
    TokenStream m_remainingTokens;
};
// Restored: the extraction stripped the template arguments from these aliases
// (reconstructed from upstream Clara v1.1.5).
using Result = BasicResult<void>;
using ParserResult = BasicResult<ParseResultType>;
using InternalParseResult = BasicResult<ParseState>;
// One row of generated help text: left-hand usage/option text and its
// right-hand description.
struct HelpColumns {
    std::string left;
    std::string right;
};
// Generic conversion: round-trip the text through a std::stringstream and
// fail with a runtime error if extraction into T fails.
// Restored the stripped template parameter list.
template<typename T>
inline auto convertInto( std::string const &source, T& target ) -> ParserResult {
    std::stringstream ss;
    ss << source;
    ss >> target;
    if( ss.fail() )
        return ParserResult::runtimeError( "Unable to convert '" + source + "' to destination type" );
    else
        return ParserResult::ok( ParseResultType::Matched );
}
// String target: no conversion needed, plain assignment always succeeds.
inline auto convertInto( std::string const &source, std::string& target ) -> ParserResult {
    target = source;
    return ParserResult::ok( ParseResultType::Matched );
}
// Boolean target: accepts y/n, 1/0, true/false, yes/no, on/off
// (case-insensitive); anything else is a runtime error.
// Restored the stripped <char> argument of static_cast.
inline auto convertInto( std::string const &source, bool &target ) -> ParserResult {
    std::string srcLC = source;
    std::transform( srcLC.begin(), srcLC.end(), srcLC.begin(), []( char c ) { return static_cast<char>( ::tolower(c) ); } );
    if (srcLC == "y" || srcLC == "1" || srcLC == "true" || srcLC == "yes" || srcLC == "on")
        target = true;
    else if (srcLC == "n" || srcLC == "0" || srcLC == "false" || srcLC == "no" || srcLC == "off")
        target = false;
    else
        return ParserResult::runtimeError( "Expected a boolean value but did not recognise: '" + source + "'" );
    return ParserResult::ok( ParseResultType::Matched );
}
#ifdef CLARA_CONFIG_OPTIONAL_TYPE
// Optional<T> target: convert into a temporary T and move it into the
// optional on success. Restored the stripped template parameter list and
// the <T> argument of the optional type.
template<typename T>
inline auto convertInto( std::string const &source, CLARA_CONFIG_OPTIONAL_TYPE<T>& target ) -> ParserResult {
    T temp;
    auto result = convertInto( source, temp );
    if( result )
        target = std::move(temp);
    return result;
}
#endif // CLARA_CONFIG_OPTIONAL_TYPE
// Mixin that deletes copy and move operations.
struct NonCopyable {
    NonCopyable() = default;
    NonCopyable( NonCopyable const & ) = delete;
    NonCopyable( NonCopyable && ) = delete;
    NonCopyable &operator=( NonCopyable const & ) = delete;
    NonCopyable &operator=( NonCopyable && ) = delete;
};
// Base of all bindings between parsed input and user variables/lambdas.
struct BoundRef : NonCopyable {
    virtual ~BoundRef() = default;
    // True when the binding appends to a container (may be matched repeatedly).
    virtual auto isContainer() const -> bool { return false; }
    // True when the binding is a boolean flag (takes no textual argument).
    virtual auto isFlag() const -> bool { return false; }
};
// Binding that receives a textual value to convert and store.
struct BoundValueRefBase : BoundRef {
    virtual auto setValue( std::string const &arg ) -> ParserResult = 0;
};
struct BoundFlagRefBase : BoundRef {
virtual auto setFlag( bool flag ) -> ParserResult = 0;
virtual auto isFlag() const -> bool { return true; }
};
// Binds a value option/argument directly to a variable of type T.
// Restored the stripped template parameter list.
template<typename T>
struct BoundValueRef : BoundValueRefBase {
    T &m_ref;

    explicit BoundValueRef( T &ref ) : m_ref( ref ) {}

    auto setValue( std::string const &arg ) -> ParserResult override {
        return convertInto( arg, m_ref );
    }
};
template
struct BoundValueRef> : BoundValueRefBase {
std::vector &m_ref;
explicit BoundValueRef( std::vector &ref ) : m_ref( ref ) {}
auto isContainer() const -> bool override { return true; }
auto setValue( std::string const &arg ) -> ParserResult override {
T temp;
auto result = convertInto( arg, temp );
if( result )
m_ref.push_back( temp );
return result;
}
};
// Binds a flag option directly to a bool variable.
struct BoundFlagRef : BoundFlagRefBase {
    bool &m_ref;

    explicit BoundFlagRef( bool &ref ) : m_ref( ref ) {}

    auto setFlag( bool flag ) -> ParserResult override {
        m_ref = flag;
        return ParserResult::ok( ParseResultType::Matched );
    }
};
template
struct LambdaInvoker {
static_assert( std::is_same::value, "Lambda must return void or clara::ParserResult" );
template
static auto invoke( L const &lambda, ArgType const &arg ) -> ParserResult {
return lambda( arg );
}
};
template<>
struct LambdaInvoker {
template
static auto invoke( L const &lambda, ArgType const &arg ) -> ParserResult {
lambda( arg );
return ParserResult::ok( ParseResultType::Matched );
}
};
template
inline auto invokeLambda( L const &lambda, std::string const &arg ) -> ParserResult {
ArgType temp{};
auto result = convertInto( arg, temp );
return !result
? result
: LambdaInvoker::ReturnType>::invoke( lambda, temp );
}
template
struct BoundLambda : BoundValueRefBase {
L m_lambda;
static_assert( UnaryLambdaTraits::isValid, "Supplied lambda must take exactly one argument" );
explicit BoundLambda( L const &lambda ) : m_lambda( lambda ) {}
auto setValue( std::string const &arg ) -> ParserResult override {
return invokeLambda::ArgType>( m_lambda, arg );
}
};
template
struct BoundFlagLambda : BoundFlagRefBase {
L m_lambda;
static_assert( UnaryLambdaTraits::isValid, "Supplied lambda must take exactly one argument" );
static_assert( std::is_same::ArgType, bool>::value, "flags must be boolean" );
explicit BoundFlagLambda( L const &lambda ) : m_lambda( lambda ) {}
auto setFlag( bool flag ) -> ParserResult override {
return LambdaInvoker::ReturnType>::invoke( m_lambda, flag );
}
};
// Marks a parser element as optional or required.
enum class Optionality { Optional, Required };
struct Parser;
// Abstract base of every parser element (options, args, exe name, composites).
class ParserBase {
public:
    virtual ~ParserBase() = default;

    // Optional pre-parse sanity check; the default always succeeds.
    virtual auto validate() const -> Result { return Result::ok(); }

    // Consumes tokens, returning the outcome plus the remaining token stream.
    virtual auto parse( std::string const& exeName, TokenStream const &tokens) const -> InternalParseResult = 0;

    // How many times this element may be bound (0 means unbounded).
    virtual auto cardinality() const -> size_t { return 1; }

    // Convenience overload: parse straight from an Args transport.
    auto parse( Args const &args ) const -> InternalParseResult {
        return parse( args.exeName(), TokenStream( args ) );
    }
};
// CRTP base adding the | and + composition operators to parser elements
// (both yield a Parser; defined later in the header, outside this chunk).
// Restored the stripped template parameter lists.
template<typename DerivedT>
class ComposableParserImpl : public ParserBase {
public:
    template<typename T>
    auto operator|( T const &other ) const -> Parser;

    template<typename T>
    auto operator+( T const &other ) const -> Parser;
};
// Common code and state for Args and Opts
template
class ParserRefImpl : public ComposableParserImpl {
protected:
Optionality m_optionality = Optionality::Optional;
std::shared_ptr m_ref;
std::string m_hint;
std::string m_description;
explicit ParserRefImpl( std::shared_ptr const &ref ) : m_ref( ref ) {}
public:
template
ParserRefImpl( T &ref, std::string const &hint )
: m_ref( std::make_shared>( ref ) ),
m_hint( hint )
{}
template
ParserRefImpl( LambdaT const &ref, std::string const &hint )
: m_ref( std::make_shared>( ref ) ),
m_hint(hint)
{}
auto operator()( std::string const &description ) -> DerivedT & {
m_description = description;
return static_cast( *this );
}
auto optional() -> DerivedT & {
m_optionality = Optionality::Optional;
return static_cast( *this );
};
auto required() -> DerivedT & {
m_optionality = Optionality::Required;
return static_cast( *this );
};
auto isOptional() const -> bool {
return m_optionality == Optionality::Optional;
}
auto cardinality() const -> size_t override {
if( m_ref->isContainer() )
return 0;
else
return 1;
}
auto hint() const -> std::string { return m_hint; }
};
class ExeName : public ComposableParserImpl {
std::shared_ptr m_name;
std::shared_ptr m_ref;
template
static auto makeRef(LambdaT const &lambda) -> std::shared_ptr {
return std::make_shared>( lambda) ;
}
public:
ExeName() : m_name( std::make_shared( "" ) ) {}
explicit ExeName( std::string &ref ) : ExeName() {
m_ref = std::make_shared>( ref );
}
template
explicit ExeName( LambdaT const& lambda ) : ExeName() {
m_ref = std::make_shared>( lambda );
}
// The exe name is not parsed out of the normal tokens, but is handled specially
auto parse( std::string const&, TokenStream const &tokens ) const -> InternalParseResult override {
return InternalParseResult::ok( ParseState( ParseResultType::NoMatch, tokens ) );
}
auto name() const -> std::string { return *m_name; }
auto set( std::string const& newName ) -> ParserResult {
auto lastSlash = newName.find_last_of( "\\/" );
auto filename = ( lastSlash == std::string::npos )
? newName
: newName.substr( lastSlash+1 );
*m_name = filename;
if( m_ref )
return m_ref->setValue( filename );
else
return ParserResult::ok( ParseResultType::Matched );
}
};
class Arg : public ParserRefImpl {
public:
using ParserRefImpl::ParserRefImpl;
auto parse( std::string const &, TokenStream const &tokens ) const -> InternalParseResult override {
auto validationResult = validate();
if( !validationResult )
return InternalParseResult( validationResult );
auto remainingTokens = tokens;
auto const &token = *remainingTokens;
if( token.type != TokenType::Argument )
return InternalParseResult::ok( ParseState( ParseResultType::NoMatch, remainingTokens ) );
assert( !m_ref->isFlag() );
auto valueRef = static_cast( m_ref.get() );
auto result = valueRef->setValue( remainingTokens->token );
if( !result )
return InternalParseResult( result );
else
return InternalParseResult::ok( ParseState( ParseResultType::Matched, ++remainingTokens ) );
}
};
// Map the Windows "/opt" spelling to "-opt"; all other spellings pass
// through unchanged (and on non-Windows platforms this is the identity).
inline auto normaliseOpt( std::string const &optName ) -> std::string {
#ifdef CLARA_PLATFORM_WINDOWS
    if( optName[0] == '/' )
        return "-" + optName.substr( 1 );
#endif
    return optName;
}
class Opt : public ParserRefImpl {
protected:
std::vector m_optNames;
public:
template
explicit Opt( LambdaT const &ref ) : ParserRefImpl( std::make_shared>( ref ) ) {}
explicit Opt( bool &ref ) : ParserRefImpl( std::make_shared