Repository: kpeeters/cadabra2 Branch: master Commit: 0afcc652660a Files: 1369 Total size: 21.6 MB Directory structure: gitextract_noaq8p8m/ ├── .editorconfig ├── .gitattributes ├── .github/ │ └── workflows/ │ ├── appimage-modern.yml │ ├── c++lib.yml │ ├── docker.yml │ ├── fedora-40-package.yml │ ├── fedora-41-package.yml │ ├── fedora-42-package.yml │ ├── freebsd.yml │ ├── homebrew-devel.yml │ ├── homebrew.yml │ ├── linux.yml │ ├── macos.yml │ ├── opensuse-tumbleweed-package.yml │ ├── tarball.yml │ ├── ubuntu-22.04-package.yml │ ├── ubuntu-24.04-package.yml │ ├── windows-installer.yml │ └── windows.yml ├── .gitignore ├── .gitmodules ├── .travis.yml ├── CITATION.cff ├── CMakeLists.txt ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── JUPYTER.rst ├── LICENSE ├── Makefile ├── README.rst ├── c++lib/ │ ├── .gitignore │ ├── CMakeLists.txt │ ├── README.txt │ ├── SympyDummy.cc │ ├── adjform.cc │ ├── cpplib.hh.in │ ├── nevaluate.cc │ ├── nevaluate.py │ ├── simple.cc │ └── trivial.cc ├── client_server/ │ ├── Actions.cc │ ├── Actions.hh │ ├── CMakeLists.txt │ ├── ComputeThread.cc │ ├── ComputeThread.hh │ ├── DocumentThread.cc │ ├── DocumentThread.hh │ ├── GUIBase.hh │ ├── ScriptThread.cc │ ├── ScriptThread.hh │ ├── Server.cc │ ├── Server.hh │ ├── Snoop.cc │ ├── Snoop.hh │ ├── TODO │ ├── cadabra-jupyter-kernel.cc │ ├── cadabra-jupyter-kernel.hh │ ├── cadabra-jupyter-main.cc │ ├── cadabra-server.cc │ ├── cadabra2html.cc │ ├── cadabra2latex.cc │ ├── connection.json │ ├── kernel.json │ ├── notebook.html │ ├── notebook.tex │ ├── popen2.cc │ ├── popen2.hh │ ├── regexp_tester.cc │ ├── test_client.cc │ ├── test_talk_to_server.cc │ ├── tree.hh │ ├── websocket_client.cc │ ├── websocket_client.hh │ ├── websocket_server.cc │ └── websocket_server.hh ├── cmake/ │ ├── cmake_uninstall.cmake.in │ ├── functions.cmake │ ├── modules/ │ │ ├── FindGLIBMM3.cmake │ │ ├── FindGLIBMM4.cmake │ │ ├── FindGMPXX.cmake │ │ ├── FindGTKMM3.cmake │ │ ├── FindGTKMM4.cmake │ │ ├── FindJSONCPP.cmake │ │ ├── 
FindLibPythonOSX.py │ │ ├── FindMathematica.cmake │ │ ├── FindMathematicaDocumentationBuild.cmake.in │ │ ├── FindMathematicaTestDriver.cmd │ │ ├── FindMathematicaTestDriver.sh │ │ ├── FindPythonLibsOSX.cmake │ │ ├── FindSQLITE3.cmake │ │ ├── FindZeroMQ.cmake │ │ └── cotire.cmake │ ├── packaging.cmake │ ├── policies.cmake │ ├── version.cmake │ └── windows.cmake ├── codemeta.json ├── conda/ │ ├── build.sh │ └── meta.yaml ├── config/ │ ├── AppRun │ ├── Doxyfile │ ├── DoxygenLayout.xml │ ├── DoxygenStyle.css │ ├── README.txt │ ├── buildbot.sh │ ├── buildpkg.sh │ ├── doxyrest-config.lua │ ├── generate_keywords.py │ ├── init-cadabra2.scm │ ├── install_python_windows.cmake.in │ ├── install_script.iss.in │ ├── make.bat │ ├── post_install.rtf │ ├── postinst.in │ ├── pre_install.rtf.in │ ├── publish-doxygen │ ├── science.cadabra.cadabra2-gtk.desktop.in │ ├── shortcuts.wxs │ ├── travisci_rsa.enc │ └── travisci_rsa.pub ├── contrib/ │ ├── einstein_equations.cnb │ └── structure_equations_and_bianchi.cnb ├── core/ │ ├── .gitignore │ ├── Adjform.cc │ ├── Adjform.hh │ ├── Algorithm.cc │ ├── Algorithm.hh │ ├── Bridge.cc │ ├── Bridge.hh │ ├── CMakeLists.txt │ ├── CdbPython.cc │ ├── CdbPython.hh │ ├── Cleanup.cc │ ├── Cleanup.hh │ ├── Combinatorics.cc │ ├── Combinatorics.hh │ ├── Compare.cc │ ├── Compare.hh │ ├── Config.hh.in │ ├── DataCell.cc │ ├── DataCell.hh │ ├── Debug.hh │ ├── DisplayBase.cc │ ├── DisplayBase.hh │ ├── DisplayMMA.cc │ ├── DisplayMMA.hh │ ├── DisplaySympy.cc │ ├── DisplaySympy.hh │ ├── DisplayTeX.cc │ ├── DisplayTeX.hh │ ├── DisplayTerminal.cc │ ├── DisplayTerminal.hh │ ├── Dummies.hh │ ├── Equals.cc │ ├── Equals.hh │ ├── ExManip.cc │ ├── ExManip.hh │ ├── ExNode.cc │ ├── ExNode.hh │ ├── Exceptions.cc │ ├── Exceptions.hh │ ├── Exchange.cc │ ├── Exchange.hh │ ├── Functional.cc │ ├── Functional.hh │ ├── Grouping.cc │ ├── Grouping.hh │ ├── Hash.cc │ ├── Hash.hh │ ├── IndexClassifier.cc │ ├── IndexClassifier.hh │ ├── IndexIterator.cc │ ├── IndexIterator.hh │ ├── 
InstallPrefix.cc │ ├── InstallPrefix.hh │ ├── Kernel.cc │ ├── Kernel.hh │ ├── Linear.cc │ ├── Linear.hh │ ├── MMACdb.cc │ ├── MMACdb.hh │ ├── Media.cc │ ├── Media.hh │ ├── MultiIndex.hh │ ├── Multiplier.cc │ ├── Multiplier.hh │ ├── NDSolver.cc │ ├── NDSolver.hh │ ├── NEvaluator.cc │ ├── NEvaluator.hh │ ├── NIntegrator.cc │ ├── NIntegrator.hh │ ├── NInterpolatingFunction.cc │ ├── NInterpolatingFunction.hh │ ├── NTensor.cc │ ├── NTensor.hh │ ├── Parser.cc │ ├── Parser.hh │ ├── Permutations.hh │ ├── PreClean.cc │ ├── PreClean.hh │ ├── PreProcessor.cc │ ├── PreProcessor.hh │ ├── ProgressMonitor.cc │ ├── ProgressMonitor.hh │ ├── Props.cc │ ├── Props.hh │ ├── PythonException.cc │ ├── PythonException.hh │ ├── ReservedNode.cc │ ├── ReservedNode.hh │ ├── Stopwatch.cc │ ├── Stopwatch.hh │ ├── Storage.cc │ ├── Storage.hh │ ├── Sum.cc │ ├── Sum.hh │ ├── Symbols.cc │ ├── Symbols.hh │ ├── SympyCdb.cc │ ├── SympyCdb.hh │ ├── TerminalStream.cc │ ├── TerminalStream.hh │ ├── YoungTab.cc │ ├── YoungTab.hh │ ├── algorithms/ │ │ ├── all_contractions.tex │ │ ├── asym.cnb │ │ ├── canonicalise.cc │ │ ├── canonicalise.cnb │ │ ├── canonicalise.hh │ │ ├── canonicalorder.tex │ │ ├── coefficients.tex │ │ ├── collect_components.cc │ │ ├── collect_components.hh │ │ ├── collect_factors.cc │ │ ├── collect_factors.cnb │ │ ├── collect_factors.hh │ │ ├── collect_terms.cc │ │ ├── collect_terms.cnb │ │ ├── collect_terms.hh │ │ ├── combine.cc │ │ ├── combine.cnb │ │ ├── combine.hh │ │ ├── complete.cc │ │ ├── complete.cnb │ │ ├── complete.hh │ │ ├── component.hh │ │ ├── decompose.cc │ │ ├── decompose.cnb │ │ ├── decompose.hh │ │ ├── decompose_product.cc │ │ ├── decompose_product.cnb │ │ ├── decompose_product.hh │ │ ├── depprint.tex │ │ ├── distribute.cc │ │ ├── distribute.cnb │ │ ├── distribute.hh │ │ ├── drop_weight.cc │ │ ├── drop_weight.cnb │ │ ├── drop_weight.hh │ │ ├── dualise_tensor.tex │ │ ├── einsteinify.cc │ │ ├── einsteinify.cnb │ │ ├── einsteinify.hh │ │ ├── eliminate_kronecker.cc │ │ ├── 
eliminate_kronecker.cnb │ │ ├── eliminate_kronecker.hh │ │ ├── eliminate_metric.cc │ │ ├── eliminate_metric.cnb │ │ ├── eliminate_metric.hh │ │ ├── eliminate_vielbein.cc │ │ ├── eliminate_vielbein.cnb │ │ ├── eliminate_vielbein.hh │ │ ├── eliminate_vielbein.tex │ │ ├── eliminateeps.tex │ │ ├── epsilon_to_delta.cc │ │ ├── epsilon_to_delta.cnb │ │ ├── epsilon_to_delta.hh │ │ ├── evaluate.cc │ │ ├── evaluate.cnb │ │ ├── evaluate.hh │ │ ├── expand.cc │ │ ├── expand.cnb │ │ ├── expand.hh │ │ ├── expand_delta.cc │ │ ├── expand_delta.cnb │ │ ├── expand_delta.hh │ │ ├── expand_diracbar.cc │ │ ├── expand_diracbar.cnb │ │ ├── expand_diracbar.hh │ │ ├── expand_dummies.cc │ │ ├── expand_dummies.cnb │ │ ├── expand_dummies.hh │ │ ├── expand_power.cc │ │ ├── expand_power.cnb │ │ ├── expand_power.hh │ │ ├── expand_product_shorthand.tex │ │ ├── explicit_indices.cc │ │ ├── explicit_indices.cnb │ │ ├── explicit_indices.hh │ │ ├── factor_in.cc │ │ ├── factor_in.cnb │ │ ├── factor_in.hh │ │ ├── factor_out.cc │ │ ├── factor_out.cnb │ │ ├── factor_out.hh │ │ ├── fierz.cc │ │ ├── fierz.cnb │ │ ├── fierz.hh │ │ ├── first_order_form.cc │ │ ├── first_order_form.hh │ │ ├── flatten_product.cc │ │ ├── flatten_product.hh │ │ ├── flatten_sum.cc │ │ ├── flatten_sum.hh │ │ ├── impose_asym.tex │ │ ├── impose_bianchi.tex │ │ ├── index_rename.tex │ │ ├── indexlist.tex │ │ ├── indexsort.cc │ │ ├── indexsort.hh │ │ ├── indexsort.tex │ │ ├── inner.tex │ │ ├── integrate_by_parts.cc │ │ ├── integrate_by_parts.cnb │ │ ├── integrate_by_parts.hh │ │ ├── join_gamma.cc │ │ ├── join_gamma.cnb │ │ ├── join_gamma.hh │ │ ├── keep_terms.cc │ │ ├── keep_terms.hh │ │ ├── keep_terms.tex │ │ ├── keep_weight.cnb │ │ ├── list_sum.tex │ │ ├── listflatten.tex │ │ ├── lower_free_indices.cc │ │ ├── lower_free_indices.cnb │ │ ├── lower_free_indices.hh │ │ ├── lr_tensor.cc │ │ ├── lr_tensor.cnb │ │ ├── lr_tensor.hh │ │ ├── lsolve.tex │ │ ├── map_mma.cc │ │ ├── map_mma.hh │ │ ├── map_sympy.cc │ │ ├── map_sympy.cnb │ │ ├── 
map_sympy.hh │ │ ├── meld.cc │ │ ├── meld.cnb │ │ ├── meld.hh │ │ ├── ndsolve.cnb │ │ ├── nevaluate.cc │ │ ├── nevaluate.cnb │ │ ├── nevaluate.hh │ │ ├── nval.cc │ │ ├── nval.cnb │ │ ├── nval.hh │ │ ├── order.cc │ │ ├── order.hh │ │ ├── order.tex │ │ ├── permute.tex │ │ ├── product_rule.cc │ │ ├── product_rule.cnb │ │ ├── product_rule.hh │ │ ├── product_shorthand.tex │ │ ├── projweyl.tex │ │ ├── properties.tex │ │ ├── proplist.tex │ │ ├── raise_free_indices.cnb │ │ ├── range.tex │ │ ├── reduce.tex │ │ ├── reduce_delta.cc │ │ ├── reduce_delta.cnb │ │ ├── reduce_delta.hh │ │ ├── remove_indexbracket.tex │ │ ├── remove_weyl_traces.tex │ │ ├── rename_dummies.cc │ │ ├── rename_dummies.cnb │ │ ├── rename_dummies.hh │ │ ├── replace_match.cc │ │ ├── replace_match.cnb │ │ ├── replace_match.hh │ │ ├── rewrite_indices.cc │ │ ├── rewrite_indices.cnb │ │ ├── rewrite_indices.hh │ │ ├── simplify.cc │ │ ├── simplify.cnb │ │ ├── simplify.hh │ │ ├── slot_asym.cnb │ │ ├── sort_product.cc │ │ ├── sort_product.cnb │ │ ├── sort_product.hh │ │ ├── sort_spinors.cc │ │ ├── sort_spinors.cnb │ │ ├── sort_spinors.hh │ │ ├── sort_sum.cc │ │ ├── sort_sum.cnb │ │ ├── sort_sum.hh │ │ ├── split.cc │ │ ├── split.hh │ │ ├── split_gamma.cc │ │ ├── split_gamma.cnb │ │ ├── split_gamma.hh │ │ ├── split_index.cc │ │ ├── split_index.cnb │ │ ├── split_index.hh │ │ ├── substitute.cc │ │ ├── substitute.cnb │ │ ├── substitute.hh │ │ ├── sumflatten.tex │ │ ├── sym.cc │ │ ├── sym.hh │ │ ├── sym.tex │ │ ├── tab_basics.cc │ │ ├── tab_basics.hh │ │ ├── tab_dimension.cc │ │ ├── tab_dimension.hh │ │ ├── tabcanonicalise.tex │ │ ├── tabdimension.tex │ │ ├── tabstandardform.tex │ │ ├── take_match.cc │ │ ├── take_match.cnb │ │ ├── take_match.hh │ │ ├── tree.tex │ │ ├── unique_indices.tex │ │ ├── untrace.cc │ │ ├── untrace.cnb │ │ ├── untrace.hh │ │ ├── unwrap.cc │ │ ├── unwrap.cnb │ │ ├── unwrap.hh │ │ ├── unzoom.cc │ │ ├── unzoom.hh │ │ ├── vary.cc │ │ ├── vary.cnb │ │ ├── vary.hh │ │ ├── weyl_index_order.tex │ │ ├── 
young_project.cc │ │ ├── young_project.hh │ │ ├── young_project.tex │ │ ├── young_project_product.cc │ │ ├── young_project_product.cnb │ │ ├── young_project_product.hh │ │ ├── young_project_tensor.cc │ │ ├── young_project_tensor.cnb │ │ ├── young_project_tensor.hh │ │ ├── zoom.cc │ │ ├── zoom.cnb │ │ └── zoom.hh │ ├── cadabra2-cli.cc │ ├── cadabra2-cli.hh │ ├── cadabra2.in │ ├── cadabra2_defaults.py.in │ ├── cadabra2cadabra.cc │ ├── cadabra2ipynb.cc │ ├── cadabra2python.cc │ ├── cdb-nbtool.cc │ ├── echokernel.py │ ├── lru_cache.hh │ ├── modules/ │ │ ├── Lie.cc │ │ ├── xperm_new.cc │ │ └── xperm_new.h │ ├── packages/ │ │ ├── CMakeLists.txt │ │ └── cdb/ │ │ ├── core/ │ │ │ ├── _component.cc │ │ │ ├── component.cnb │ │ │ ├── manip.cnb │ │ │ ├── solve.cnb │ │ │ └── trace.cnb │ │ ├── gauge_theory/ │ │ │ └── instantons.cnb │ │ ├── graphics/ │ │ │ └── plot.cnb │ │ ├── interact/ │ │ │ └── slider.cnb │ │ ├── main.py │ │ ├── numeric/ │ │ │ ├── evaluate.cnb │ │ │ └── integrate.cnb │ │ ├── relativity/ │ │ │ ├── __init__.cdb │ │ │ ├── abstract.cnb │ │ │ └── schwarzschild.cnb │ │ ├── remote/ │ │ │ ├── __init__.py │ │ │ ├── highlight.py │ │ │ ├── record.py │ │ │ └── speech.py │ │ ├── sympy/ │ │ │ ├── calculus.cnb │ │ │ └── solvers.cnb │ │ └── utils/ │ │ ├── _algorithm.cc │ │ ├── develop.cnb │ │ ├── indices.cnb │ │ ├── node.cnb │ │ ├── tableau.cnb │ │ └── types.cnb │ ├── passing.cc │ ├── properties/ │ │ ├── Accent.cc │ │ ├── Accent.cnb │ │ ├── Accent.hh │ │ ├── Accent.tex │ │ ├── AntiCommuting.cc │ │ ├── AntiCommuting.cnb │ │ ├── AntiCommuting.hh │ │ ├── AntiCommuting.tex │ │ ├── AntiSelfDual.tex │ │ ├── AntiSymmetric.cc │ │ ├── AntiSymmetric.cnb │ │ ├── AntiSymmetric.hh │ │ ├── AntiSymmetric.tex │ │ ├── Commuting.cc │ │ ├── Commuting.cnb │ │ ├── Commuting.hh │ │ ├── Commuting.tex │ │ ├── CommutingAsProduct.cc │ │ ├── CommutingAsProduct.cnb │ │ ├── CommutingAsProduct.hh │ │ ├── CommutingAsProduct.tex │ │ ├── CommutingAsSum.cc │ │ ├── CommutingAsSum.cnb │ │ ├── CommutingAsSum.hh │ 
│ ├── CommutingAsSum.tex │ │ ├── CommutingBehaviour.cc │ │ ├── CommutingBehaviour.hh │ │ ├── Coordinate.cc │ │ ├── Coordinate.cnb │ │ ├── Coordinate.hh │ │ ├── Coordinate.tex │ │ ├── DAntiSymmetric.cc │ │ ├── DAntiSymmetric.cnb │ │ ├── DAntiSymmetric.hh │ │ ├── DAntiSymmetric.tex │ │ ├── Depends.cc │ │ ├── Depends.cnb │ │ ├── Depends.hh │ │ ├── Depends.tex │ │ ├── DependsBase.hh │ │ ├── DependsInherit.cc │ │ ├── DependsInherit.hh │ │ ├── DependsInherit.tex │ │ ├── Derivative.cc │ │ ├── Derivative.cnb │ │ ├── Derivative.hh │ │ ├── Derivative.tex │ │ ├── DerivativeOp.cc │ │ ├── DerivativeOp.hh │ │ ├── Determinant.cc │ │ ├── Determinant.cnb │ │ ├── Determinant.hh │ │ ├── Diagonal.cc │ │ ├── Diagonal.cnb │ │ ├── Diagonal.hh │ │ ├── Diagonal.tex │ │ ├── DifferentialForm.cc │ │ ├── DifferentialForm.hh │ │ ├── DifferentialFormBase.hh │ │ ├── DiracBar.cc │ │ ├── DiracBar.cnb │ │ ├── DiracBar.hh │ │ ├── DiracBar.tex │ │ ├── Distributable.cc │ │ ├── Distributable.cnb │ │ ├── Distributable.hh │ │ ├── Distributable.tex │ │ ├── EpsilonTensor.cc │ │ ├── EpsilonTensor.cnb │ │ ├── EpsilonTensor.hh │ │ ├── EpsilonTensor.tex │ │ ├── ExteriorDerivative.cc │ │ ├── ExteriorDerivative.hh │ │ ├── FilledTableau.cc │ │ ├── FilledTableau.cnb │ │ ├── FilledTableau.hh │ │ ├── FilledTableau.tex │ │ ├── GammaMatrix.cc │ │ ├── GammaMatrix.cnb │ │ ├── GammaMatrix.hh │ │ ├── GammaMatrix.tex │ │ ├── GammaTraceless.cc │ │ ├── GammaTraceless.hh │ │ ├── GammaTraceless.tex │ │ ├── ImaginaryI.cc │ │ ├── ImaginaryI.hh │ │ ├── ImplicitIndex.cc │ │ ├── ImplicitIndex.cnb │ │ ├── ImplicitIndex.hh │ │ ├── ImplicitIndex.tex │ │ ├── IndexInherit.cc │ │ ├── IndexInherit.cnb │ │ ├── IndexInherit.hh │ │ ├── IndexInherit.tex │ │ ├── Indices.cc │ │ ├── Indices.cnb │ │ ├── Indices.hh │ │ ├── Indices.tex │ │ ├── Integer.cc │ │ ├── Integer.cnb │ │ ├── Integer.hh │ │ ├── Integer.tex │ │ ├── Integral.hh │ │ ├── InverseMetric.cc │ │ ├── InverseMetric.cnb │ │ ├── InverseMetric.hh │ │ ├── InverseMetric.tex │ │ ├── 
InverseVielbein.cnb │ │ ├── InverseVielbein.tex │ │ ├── KeepHistory.tex │ │ ├── KroneckerDelta.cc │ │ ├── KroneckerDelta.cnb │ │ ├── KroneckerDelta.hh │ │ ├── KroneckerDelta.tex │ │ ├── LaTeXForm.cc │ │ ├── LaTeXForm.cnb │ │ ├── LaTeXForm.hh │ │ ├── LaTeXForm.tex │ │ ├── Matrix.cc │ │ ├── Matrix.hh │ │ ├── Matrix.tex │ │ ├── Metric.cc │ │ ├── Metric.cnb │ │ ├── Metric.hh │ │ ├── Metric.tex │ │ ├── NonCommuting.cc │ │ ├── NonCommuting.cnb │ │ ├── NonCommuting.hh │ │ ├── NonCommuting.tex │ │ ├── NumericalFlat.cc │ │ ├── NumericalFlat.hh │ │ ├── NumericalFlat.tex │ │ ├── PartialDerivative.cc │ │ ├── PartialDerivative.cnb │ │ ├── PartialDerivative.hh │ │ ├── PartialDerivative.tex │ │ ├── PostDefaultRules.tex │ │ ├── PreDefaultRules.tex │ │ ├── PropertyInherit.tex │ │ ├── RiemannTensor.cc │ │ ├── RiemannTensor.cnb │ │ ├── RiemannTensor.hh │ │ ├── RiemannTensor.tex │ │ ├── SatisfiesBianchi.cc │ │ ├── SatisfiesBianchi.cnb │ │ ├── SatisfiesBianchi.hh │ │ ├── SatisfiesBianchi.tex │ │ ├── SelfAntiCommuting.cc │ │ ├── SelfAntiCommuting.cnb │ │ ├── SelfAntiCommuting.hh │ │ ├── SelfAntiCommuting.tex │ │ ├── SelfCommuting.cc │ │ ├── SelfCommuting.cnb │ │ ├── SelfCommuting.hh │ │ ├── SelfCommuting.tex │ │ ├── SelfCommutingBehaviour.hh │ │ ├── SelfDual.tex │ │ ├── SelfNonCommuting.cc │ │ ├── SelfNonCommuting.cnb │ │ ├── SelfNonCommuting.hh │ │ ├── SelfNonCommuting.tex │ │ ├── SigmaBarMatrix.tex │ │ ├── SigmaMatrix.hh │ │ ├── SigmaMatrix.tex │ │ ├── SortOrder.cc │ │ ├── SortOrder.cnb │ │ ├── SortOrder.hh │ │ ├── SortOrder.tex │ │ ├── Spinor.cc │ │ ├── Spinor.cnb │ │ ├── Spinor.hh │ │ ├── Spinor.tex │ │ ├── Symbol.cc │ │ ├── Symbol.cnb │ │ ├── Symbol.hh │ │ ├── Symmetric.cc │ │ ├── Symmetric.cnb │ │ ├── Symmetric.hh │ │ ├── Symmetric.tex │ │ ├── Tableau.cc │ │ ├── Tableau.cnb │ │ ├── Tableau.hh │ │ ├── TableauBase.cc │ │ ├── TableauBase.hh │ │ ├── TableauInherit.cc │ │ ├── TableauInherit.hh │ │ ├── TableauSymmetry.cc │ │ ├── TableauSymmetry.cnb │ │ ├── TableauSymmetry.hh │ │ ├── 
TableauSymmetry.tex │ │ ├── Trace.cc │ │ ├── Trace.cnb │ │ ├── Trace.hh │ │ ├── Traceless.cc │ │ ├── Traceless.hh │ │ ├── Traceless.tex │ │ ├── Vielbein.cc │ │ ├── Vielbein.cnb │ │ ├── Vielbein.hh │ │ ├── Vielbein.tex │ │ ├── Weight.cc │ │ ├── Weight.cnb │ │ ├── Weight.hh │ │ ├── WeightBase.hh │ │ ├── WeightInherit.cc │ │ ├── WeightInherit.cnb │ │ ├── WeightInherit.hh │ │ ├── WeylTensor.cc │ │ ├── WeylTensor.hh │ │ └── WeylTensor.tex │ ├── pythoncdb/ │ │ ├── py_algorithms.cc │ │ ├── py_algorithms.hh │ │ ├── py_ex.cc │ │ ├── py_ex.hh │ │ ├── py_globals.cc │ │ ├── py_globals.hh │ │ ├── py_helpers.cc │ │ ├── py_helpers.hh │ │ ├── py_kernel.cc │ │ ├── py_kernel.hh │ │ ├── py_media.cc │ │ ├── py_media.hh │ │ ├── py_module.cc │ │ ├── py_ntensor.cc │ │ ├── py_ntensor.hh │ │ ├── py_packages.cc │ │ ├── py_packages.hh │ │ ├── py_progress.cc │ │ ├── py_progress.hh │ │ ├── py_properties.cc │ │ ├── py_properties.hh │ │ ├── py_stopwatch.cc │ │ ├── py_stopwatch.hh │ │ ├── py_tableau.cc │ │ └── py_tableau.hh │ ├── test_benchmark.cc │ ├── test_compile_command.py │ ├── test_internals.cc │ ├── test_multiindex.cc │ ├── test_multiplier.cc │ ├── test_permutations.cc │ ├── test_preprocessor.cc │ ├── test_wstp.cc │ └── tree.hh ├── doc/ │ ├── .gitignore │ ├── adjacency_form.md │ ├── autogobble.sty │ ├── cadabra2.tex │ ├── cadabra2_hep.tex │ ├── description │ ├── license.txt │ ├── main.md │ ├── modules.dox │ ├── random.md │ ├── reserved/ │ │ ├── anticommutator.tex │ │ ├── arrow.tex │ │ ├── cdot.tex │ │ ├── comma.tex │ │ ├── commutator.tex │ │ ├── conditional.tex │ │ ├── equals.tex │ │ ├── expression.tex │ │ ├── factorial.tex │ │ ├── frac.tex │ │ ├── indexbracket.tex │ │ ├── infty.tex │ │ ├── label.tex │ │ ├── matrix.tex │ │ ├── pow.tex │ │ ├── prod.tex │ │ ├── regex.tex │ │ ├── sequence.tex │ │ ├── sum.tex │ │ └── unequals.tex │ ├── tableaux.sty │ ├── the_cadabra_book.bib │ ├── the_cadabra_book.tex │ ├── users/ │ │ ├── command_line.tex │ │ ├── comparison.tex │ │ ├── components.tex │ │ ├── 
input.tex │ │ └── notebook_comparisons.tex │ └── writing_algorithms.tex ├── docker/ │ ├── Dockerfile │ └── entrypoint.sh ├── examples/ │ ├── .gitignore │ ├── auto_meld.cnb │ ├── automatic_multiterm.cnb │ ├── beginners.cnb │ ├── bianchi_identities.cnb │ ├── canonicalise.cnb │ ├── cell_ids.cnb │ ├── component_evaluation.cnb │ ├── components2.cnb │ ├── converge.cnb │ ├── covariant_derivative.cdb │ ├── equations_of_motion.cnb │ ├── exterior.cnb │ ├── fermionic_oscillator_algebra.cnb │ ├── fierz.cnb │ ├── for_previous_users.cnb │ ├── frw.cnb │ ├── gamma_matrix_algebra.cnb │ ├── gamma_traces.cnb │ ├── graphical_user_interface.cnb │ ├── ho.cnb │ ├── indexing_expressions.cnb │ ├── input_format.cnb │ ├── kaluza_klein.cnb │ ├── kerr.cnb │ ├── library.cnb │ ├── lovelock.cnb │ ├── nintegrate.cnb │ ├── numerics.cnb │ ├── packages.cnb │ ├── packages2.cnb │ ├── plotting.cnb │ ├── poincare_algebra.cnb │ ├── post_processing.cnb │ ├── quickstart.cnb │ ├── ref_accents.cnb │ ├── ref_c++_library.cnb │ ├── ref_core_package.cnb │ ├── ref_default_simplification.cnb │ ├── ref_derivatives.cnb │ ├── ref_dynamical_updates.cnb │ ├── ref_exponents.cnb │ ├── ref_flags_variables.cnb │ ├── ref_implicit_versus_explicit.cnb │ ├── ref_import.cnb │ ├── ref_indexbrackets.cnb │ ├── ref_indices.cnb │ ├── ref_kernel.cnb │ ├── ref_ndsolve.cnb │ ├── ref_numerical.cnb │ ├── ref_ordering.cnb │ ├── ref_patterns.cnb │ ├── ref_plotting.cnb │ ├── ref_printing.cnb │ ├── ref_programming.cnb │ ├── ref_properties.cnb │ ├── ref_selecting.cnb │ ├── ref_spacing.cnb │ ├── ref_sympy.cnb │ ├── reset.cnb │ ├── sample_dyn.cnb │ ├── scalar_manipulations.cnb │ ├── scalar_manipulations2.cnb │ ├── schwarzschild.cnb │ ├── schwarzschild.ipynb │ ├── simple_evaluate.cnb │ ├── slider.cnb │ ├── sphere.cnb │ ├── spinors.cnb │ ├── string_states.cnb │ ├── super_maxwell.cnb │ ├── supergravity.cnb │ ├── sympy_bridge.cnb │ ├── sympy_examples.cnb │ ├── tensor_monomials.cnb │ ├── tensors_in_denominators.cnb │ ├── typesetting.cnb │ ├── 
utf8.cnb │ ├── vacuum_einstein_first_order.cnb │ ├── variational_derivatives.cnb │ ├── working_with_ex.cnb │ └── world-sheet_susy.cnb ├── frontend/ │ ├── CMakeLists.txt │ ├── common/ │ │ ├── CMakeLists.txt │ │ ├── TeXEngine.cc │ │ ├── TeXEngine.hh │ │ ├── lodepng.cc │ │ ├── lodepng.h │ │ ├── preamble.tex │ │ ├── test_tex.cc │ │ └── testpre.tex │ ├── gtkmm/ │ │ ├── CMakeLists.txt │ │ ├── Cadabra.cc │ │ ├── Cadabra.hh │ │ ├── ChooseColoursDialog.cc │ │ ├── ChooseColoursDialog.hh │ │ ├── CodeInput.cc │ │ ├── CodeInput.hh │ │ ├── Console.cc │ │ ├── Console.hh │ │ ├── DiffViewer.cc │ │ ├── DiffViewer.hh │ │ ├── ImageView.cc │ │ ├── ImageView.hh │ │ ├── Keywords.cc │ │ ├── Keywords.hh │ │ ├── NotebookCanvas.cc │ │ ├── NotebookCanvas.hh │ │ ├── NotebookWindow.cc │ │ ├── NotebookWindow.hh │ │ ├── Preferences.hh │ │ ├── SelectFileDialog.cc │ │ ├── SelectFileDialog.hh │ │ ├── SliderView.cc │ │ ├── SliderView.hh │ │ ├── TeXView.cc │ │ ├── TeXView.hh │ │ ├── VisualCell.hh │ │ ├── cadabra2-gtk.appdata.xml.in │ │ ├── cdb-icons/ │ │ │ └── README.md │ │ ├── config/ │ │ │ ├── gschemas.compiled │ │ │ └── settings.ini │ │ ├── icons/ │ │ │ ├── Adwaita/ │ │ │ │ ├── cursors/ │ │ │ │ │ ├── 00008160000006810000408080010102.cur │ │ │ │ │ ├── 028006030e0e7ebffc7f7070c0600140.cur │ │ │ │ │ ├── 03b6e0fcb3499374a867c041f52298f0.cur │ │ │ │ │ ├── 08e8e1c95fe2fc01f976f1e063a24ccd.ani │ │ │ │ │ ├── 1081e37283d90000800003c07f3ef6bf.cur │ │ │ │ │ ├── 14fef782d02440884392942c11205230.cur │ │ │ │ │ ├── 2870a09082c103050810ffdffffe0204.cur │ │ │ │ │ ├── 3085a0e285430894940527032f8b26df.cur │ │ │ │ │ ├── 3ecb610c1bf2410f44200f48c40d3599.ani │ │ │ │ │ ├── 4498f0e0c1937ffe01fd06f973665830.cur │ │ │ │ │ ├── 5c6cd98b3f3ebcb1f9c7f1c204630408.cur │ │ │ │ │ ├── 6407b0e94181790501fd1e167b474872.cur │ │ │ │ │ ├── 640fb0e74195791501fd1ed57b41487f.cur │ │ │ │ │ ├── 9081237383d90e509aa00f00170e968f.cur │ │ │ │ │ ├── 9d800788f1b08800ae810202380a0822.cur │ │ │ │ │ ├── X_cursor.cur │ │ │ │ │ ├── alias.cur │ │ │ │ │ 
├── all-scroll.cur │ │ │ │ │ ├── arrow.cur │ │ │ │ │ ├── bd_double_arrow.cur │ │ │ │ │ ├── bottom_left_corner.cur │ │ │ │ │ ├── bottom_right_corner.cur │ │ │ │ │ ├── bottom_side.cur │ │ │ │ │ ├── bottom_tee.cur │ │ │ │ │ ├── c7088f0f3e6c8088236ef8e1e3e70000.cur │ │ │ │ │ ├── cell.cur │ │ │ │ │ ├── circle.cur │ │ │ │ │ ├── col-resize.cur │ │ │ │ │ ├── context-menu.cur │ │ │ │ │ ├── copy.cur │ │ │ │ │ ├── cross.cur │ │ │ │ │ ├── cross_reverse.cur │ │ │ │ │ ├── crossed_circle.cur │ │ │ │ │ ├── crosshair.cur │ │ │ │ │ ├── d9ce0ab605698f320427677b458ad60b.cur │ │ │ │ │ ├── default.cur │ │ │ │ │ ├── diamond_cross.cur │ │ │ │ │ ├── dnd-ask.cur │ │ │ │ │ ├── dnd-copy.cur │ │ │ │ │ ├── dnd-link.cur │ │ │ │ │ ├── dnd-move.cur │ │ │ │ │ ├── dnd-no-drop.cur │ │ │ │ │ ├── dnd-none.cur │ │ │ │ │ ├── dot_box_mask.cur │ │ │ │ │ ├── dotbox.cur │ │ │ │ │ ├── double_arrow.cur │ │ │ │ │ ├── draft_large.cur │ │ │ │ │ ├── draft_small.cur │ │ │ │ │ ├── draped_box.cur │ │ │ │ │ ├── e-resize.cur │ │ │ │ │ ├── e29285e634086352946a0e7090d73106.cur │ │ │ │ │ ├── ew-resize.cur │ │ │ │ │ ├── fcf1c3c7cd4491d801f1e1c78f100000.cur │ │ │ │ │ ├── fd_double_arrow.cur │ │ │ │ │ ├── fleur.cur │ │ │ │ │ ├── grab.cur │ │ │ │ │ ├── grabbing.cur │ │ │ │ │ ├── h_double_arrow.cur │ │ │ │ │ ├── hand.cur │ │ │ │ │ ├── hand1.cur │ │ │ │ │ ├── hand2.cur │ │ │ │ │ ├── help.cur │ │ │ │ │ ├── icon.cur │ │ │ │ │ ├── left_ptr.cur │ │ │ │ │ ├── left_ptr_help.cur │ │ │ │ │ ├── left_ptr_watch.ani │ │ │ │ │ ├── left_side.cur │ │ │ │ │ ├── left_tee.cur │ │ │ │ │ ├── link.cur │ │ │ │ │ ├── ll_angle.cur │ │ │ │ │ ├── lr_angle.cur │ │ │ │ │ ├── move.cur │ │ │ │ │ ├── n-resize.cur │ │ │ │ │ ├── ne-resize.cur │ │ │ │ │ ├── nesw-resize.cur │ │ │ │ │ ├── no-drop.cur │ │ │ │ │ ├── not-allowed.cur │ │ │ │ │ ├── ns-resize.cur │ │ │ │ │ ├── nw-resize.cur │ │ │ │ │ ├── nwse-resize.cur │ │ │ │ │ ├── pencil.cur │ │ │ │ │ ├── pirate.cur │ │ │ │ │ ├── plus.cur │ │ │ │ │ ├── pointer-move.cur │ │ │ │ │ ├── pointer.cur │ │ │ │ │ ├── 
progress.ani │ │ │ │ │ ├── question_arrow.cur │ │ │ │ │ ├── right_ptr.cur │ │ │ │ │ ├── right_side.cur │ │ │ │ │ ├── right_tee.cur │ │ │ │ │ ├── row-resize.cur │ │ │ │ │ ├── s-resize.cur │ │ │ │ │ ├── sb_down_arrow.cur │ │ │ │ │ ├── sb_h_double_arrow.cur │ │ │ │ │ ├── sb_left_arrow.cur │ │ │ │ │ ├── sb_right_arrow.cur │ │ │ │ │ ├── sb_up_arrow.cur │ │ │ │ │ ├── sb_v_double_arrow.cur │ │ │ │ │ ├── se-resize.cur │ │ │ │ │ ├── size_all.cur │ │ │ │ │ ├── size_bdiag.cur │ │ │ │ │ ├── size_fdiag.cur │ │ │ │ │ ├── size_hor.cur │ │ │ │ │ ├── size_ver.cur │ │ │ │ │ ├── sw-resize.cur │ │ │ │ │ ├── target.cur │ │ │ │ │ ├── tcross.cur │ │ │ │ │ ├── text.cur │ │ │ │ │ ├── top_left_arrow.cur │ │ │ │ │ ├── top_left_corner.cur │ │ │ │ │ ├── top_right_corner.cur │ │ │ │ │ ├── top_side.cur │ │ │ │ │ ├── top_tee.cur │ │ │ │ │ ├── ul_angle.cur │ │ │ │ │ ├── ur_angle.cur │ │ │ │ │ ├── v_double_arrow.cur │ │ │ │ │ ├── vertical-text.cur │ │ │ │ │ ├── w-resize.cur │ │ │ │ │ ├── wait.ani │ │ │ │ │ ├── watch.ani │ │ │ │ │ ├── xterm.cur │ │ │ │ │ ├── zoom-in.cur │ │ │ │ │ └── zoom-out.cur │ │ │ │ ├── icon-theme.cache │ │ │ │ └── index.theme │ │ │ ├── README.txt │ │ │ └── hicolor/ │ │ │ └── icon-theme.cache │ │ ├── main.cc │ │ ├── science.cadabra.cadabra2-gtk.appdata.xml.in │ │ ├── theme/ │ │ │ ├── README.txt │ │ │ └── Windows10/ │ │ │ └── gtk-3.20/ │ │ │ ├── apps/ │ │ │ │ └── gnome-terminal.css │ │ │ ├── gtk-cadabra.css │ │ │ ├── gtk-contained-dark.css │ │ │ ├── gtk-contained.css │ │ │ ├── gtk-dark.css │ │ │ ├── gtk.css │ │ │ └── settings.ini │ │ └── win_res.rc.in │ ├── latex/ │ │ ├── install.cmake │ │ ├── tableaux.sty │ │ └── young.html │ ├── osx/ │ │ ├── CMakeLists.txt │ │ ├── Cadabra/ │ │ │ ├── Cadabra/ │ │ │ │ ├── AppDelegate.h │ │ │ │ ├── AppDelegate.mm │ │ │ │ ├── Base.lproj/ │ │ │ │ │ └── Cadabra.xib │ │ │ │ ├── Images.xcassets/ │ │ │ │ │ └── AppIcon.appiconset/ │ │ │ │ │ └── Contents.json │ │ │ │ ├── Info.plist │ │ │ │ ├── Notebook.xib │ │ │ │ ├── NotebookCanvas.hh │ │ │ │ ├── 
NotebookCanvas.mm │ │ │ │ ├── NotebookWindow.hh │ │ │ │ ├── NotebookWindow.mm │ │ │ │ ├── Test.xib │ │ │ │ └── main.m │ │ │ ├── Cadabra.xcodeproj/ │ │ │ │ ├── project.pbxproj │ │ │ │ ├── project.xcworkspace/ │ │ │ │ │ ├── contents.xcworkspacedata │ │ │ │ │ ├── xcshareddata/ │ │ │ │ │ │ └── Cadabra.xccheckout │ │ │ │ │ └── xcuserdata/ │ │ │ │ │ └── kasper.xcuserdatad/ │ │ │ │ │ └── UserInterfaceState.xcuserstate │ │ │ │ └── xcuserdata/ │ │ │ │ └── kasper.xcuserdatad/ │ │ │ │ ├── .gitignore │ │ │ │ └── xcschemes/ │ │ │ │ ├── Cadabra.xcscheme │ │ │ │ └── xcschememanagement.plist │ │ │ ├── CadabraTests/ │ │ │ │ ├── CadabraTests.m │ │ │ │ └── Info.plist │ │ │ ├── NotebookController.h │ │ │ └── NotebookController.mm │ │ └── fake.cc │ ├── qt5/ │ │ └── README.txt │ └── web/ │ ├── CMakeLists.txt │ ├── Makefile │ ├── README.md │ ├── css/ │ │ └── cadabra.css │ ├── docker/ │ │ ├── .gitignore │ │ └── Dockerfile │ ├── html/ │ │ └── index.html │ ├── js/ │ │ └── cadabra.js │ └── src/ │ ├── NotebookWindow.cc │ ├── NotebookWindow.hh │ └── server.py ├── jupyterkernel/ │ ├── .gitignore │ ├── CMakeLists.txt │ ├── cadabra2_jupyter/ │ │ ├── __init__.py.in │ │ ├── __main__.py │ │ ├── completer.py │ │ ├── context.py │ │ ├── kernel.py │ │ └── server.py │ ├── kernelspec/ │ │ └── kernel.json.in │ ├── lexer/ │ │ ├── cadabra.js │ │ └── cadabra.py │ └── readme.txt ├── libs/ │ ├── appdirs/ │ │ └── cdb_appdirs.py │ ├── base64/ │ │ ├── base64.cc │ │ └── base64.hh │ ├── cm/ │ │ ├── cmunbbx.clm1 │ │ ├── cmunbbx.otf │ │ ├── cmunbi.clm1 │ │ ├── cmunbi.otf │ │ ├── cmunbl.clm1 │ │ ├── cmunbl.otf │ │ ├── cmunbmo.clm1 │ │ ├── cmunbmo.otf │ │ ├── cmunbmr.clm1 │ │ ├── cmunbmr.otf │ │ ├── cmunbso.clm1 │ │ ├── cmunbso.otf │ │ ├── cmunbsr.clm1 │ │ ├── cmunbsr.otf │ │ ├── cmunbtl.clm1 │ │ ├── cmunbtl.otf │ │ ├── cmunbto.clm1 │ │ ├── cmunbto.otf │ │ ├── cmunbx.clm1 │ │ ├── cmunbx.otf │ │ ├── cmunbxo.clm1 │ │ ├── cmunbxo.otf │ │ ├── cmunci.clm1 │ │ ├── cmunci.otf │ │ ├── cmunit.clm1 │ │ ├── cmunit.otf │ │ ├── 
cmunobi.clm1 │ │ ├── cmunobi.otf │ │ ├── cmunobx.clm1 │ │ ├── cmunobx.otf │ │ ├── cmunorm.clm1 │ │ ├── cmunorm.otf │ │ ├── cmunoti.clm1 │ │ ├── cmunoti.otf │ │ ├── cmunrb.clm1 │ │ ├── cmunrb.otf │ │ ├── cmunrm.clm1 │ │ ├── cmunrm.otf │ │ ├── cmunsi.clm1 │ │ ├── cmunsi.otf │ │ ├── cmunsl.clm1 │ │ ├── cmunsl.otf │ │ ├── cmunso.clm1 │ │ ├── cmunso.otf │ │ ├── cmunss.clm1 │ │ ├── cmunss.otf │ │ ├── cmunssdc.clm1 │ │ ├── cmunssdc.otf │ │ ├── cmunst.clm1 │ │ ├── cmunst.otf │ │ ├── cmunsx.clm1 │ │ ├── cmunsx.otf │ │ ├── cmuntb.clm1 │ │ ├── cmuntb.otf │ │ ├── cmunti.clm1 │ │ ├── cmunti.otf │ │ ├── cmuntt.clm1 │ │ ├── cmuntt.otf │ │ ├── cmuntx.clm1 │ │ ├── cmuntx.otf │ │ ├── cmunui.clm1 │ │ ├── cmunui.otf │ │ ├── cmunvi.clm1 │ │ ├── cmunvi.otf │ │ ├── cmunvt.clm1 │ │ └── cmunvt.otf │ ├── dbg/ │ │ └── dbg.h │ ├── internal/ │ │ └── include/ │ │ └── internal/ │ │ ├── difflib.h │ │ ├── string_tools.h │ │ ├── uniconv.h │ │ ├── unistd.h │ │ └── uuid.h │ ├── linenoise/ │ │ ├── LICENSE │ │ └── linenoise.hpp │ ├── nlohmann/ │ │ └── nlohmann/ │ │ └── json.hpp │ ├── pybind11/ │ │ ├── CMakeLists.txt │ │ ├── LICENSE │ │ ├── include/ │ │ │ └── pybind11/ │ │ │ ├── attr.h │ │ │ ├── buffer_info.h │ │ │ ├── cast.h │ │ │ ├── chrono.h │ │ │ ├── common.h │ │ │ ├── complex.h │ │ │ ├── detail/ │ │ │ │ ├── class.h │ │ │ │ ├── common.h │ │ │ │ ├── cpp_conduit.h │ │ │ │ ├── descr.h │ │ │ │ ├── exception_translation.h │ │ │ │ ├── init.h │ │ │ │ ├── internals.h │ │ │ │ ├── type_caster_base.h │ │ │ │ ├── typeid.h │ │ │ │ └── value_and_holder.h │ │ │ ├── eigen/ │ │ │ │ ├── common.h │ │ │ │ ├── matrix.h │ │ │ │ └── tensor.h │ │ │ ├── eigen.h │ │ │ ├── embed.h │ │ │ ├── eval.h │ │ │ ├── functional.h │ │ │ ├── gil.h │ │ │ ├── gil_safe_call_once.h │ │ │ ├── iostream.h │ │ │ ├── numpy.h │ │ │ ├── operators.h │ │ │ ├── options.h │ │ │ ├── pybind11.h │ │ │ ├── pytypes.h │ │ │ ├── stl/ │ │ │ │ └── filesystem.h │ │ │ ├── stl.h │ │ │ ├── stl_bind.h │ │ │ ├── type_caster_pyobject_ptr.h │ │ │ └── typing.h │ │ ├── 
pybind11/ │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ ├── _version.py │ │ │ ├── commands.py │ │ │ ├── py.typed │ │ │ └── setup_helpers.py │ │ └── tools/ │ │ ├── FindCatch.cmake │ │ ├── FindEigen3.cmake │ │ ├── FindPythonLibsNew.cmake │ │ ├── JoinPaths.cmake │ │ ├── check-style.sh │ │ ├── cmake_uninstall.cmake.in │ │ ├── codespell_ignore_lines_from_errors.py │ │ ├── libsize.py │ │ ├── make_changelog.py │ │ ├── pybind11.pc.in │ │ ├── pybind11Common.cmake │ │ ├── pybind11Config.cmake.in │ │ ├── pybind11GuessPythonExtSuffix.cmake │ │ ├── pybind11NewTools.cmake │ │ ├── pybind11Tools.cmake │ │ ├── pyproject.toml │ │ ├── setup_global.py.in │ │ ├── setup_main.py.in │ │ └── test-pybind11GuessPythonExtSuffix.cmake │ ├── sqlite3/ │ │ ├── include/ │ │ │ └── sqlite3.h │ │ └── sqlite3.c │ ├── tiny-process-library/ │ │ ├── LICENSE │ │ ├── process.cpp │ │ ├── process.hpp │ │ ├── process_unix.cpp │ │ └── process_win.cpp │ ├── tinyxml2/ │ │ ├── CMakeLists.txt │ │ ├── cmake/ │ │ │ ├── tinyxml2-config.cmake │ │ │ └── tinyxml2.pc.in │ │ ├── tinyxml2.cpp │ │ └── tinyxml2.h │ └── whereami/ │ ├── LICENSE.WTFPLv2 │ ├── whereami.c │ └── whereami.h ├── man/ │ └── man1/ │ ├── cadabra-server.1 │ ├── cadabra2-cli.1 │ ├── cadabra2-gtk.1 │ ├── cadabra2.1 │ ├── cadabra2cadabra.1 │ ├── cadabra2html.1 │ ├── cadabra2ipynb.1 │ ├── cadabra2latex.1 │ └── cadabra2python.1 ├── paper/ │ ├── paper.bib │ └── paper.md ├── tests/ │ ├── .gitignore │ ├── CMakeLists.txt │ ├── algebra.cdb │ ├── basic.cdb │ ├── callbacks.cdb │ ├── canonicalise.cdb │ ├── components.cdb │ ├── decompose.cdb │ ├── decompose.cnb │ ├── delta.cdb │ ├── derivative.cdb │ ├── display.cdb │ ├── display.cnb │ ├── dummies.cdb │ ├── explicit_implicit.cdb │ ├── factor.cdb │ ├── field_theory.cdb │ ├── fierz.cdb │ ├── fixed_point.cdb │ ├── forms.cdb │ ├── gamma.cdb │ ├── gamma_paper.cdb │ ├── implicit.cdb │ ├── index_positions.cdb │ ├── integrals.cdb │ ├── ipynb_module.ipynb │ ├── kaluza_klein.cdb │ ├── kerr.cnb │ ├── latexform.cnb │ ├── 
manip.cdb │ ├── meld.cdb │ ├── mma.cdb │ ├── mma.cnb │ ├── module01.cnb │ ├── module02.cnb │ ├── module03.cdb │ ├── modules.cdb │ ├── multiterm.cdb │ ├── nevaluate.cdb │ ├── new_paper.cdb │ ├── noncovariant.cdb │ ├── numerical.cdb │ ├── output.cdb │ ├── packages.cdb │ ├── packages.cnb │ ├── paper.cdb │ ├── programming.cdb │ ├── properties.cdb │ ├── reduce.cdb │ ├── relativity.cdb │ ├── scope.cdb │ ├── selecting.cdb │ ├── semicolon-is-display.cnb │ ├── serialize.cdb │ ├── simplify.cdb │ ├── spinors.cdb │ ├── substitute.cdb │ ├── symmetry.cdb │ ├── sympy_bridge.cnb │ ├── sympy_cdb.cdb │ ├── test_comparison.cc │ ├── tests.cdb │ ├── trigonometric.cdb │ ├── unicode.cdb │ ├── vary.cdb │ ├── working.cdb │ ├── young.cdb │ └── yrtrace.cdb ├── tutorials/ │ └── 01_basics.py ├── vcpkg.json └── web2/ ├── CMakeLists.txt ├── README.txt ├── cadabra2/ │ ├── .gitignore │ ├── robots.txt │ └── source/ │ ├── blog.html │ ├── changelog.html │ ├── changes12.html │ ├── clay.yaml │ ├── comparison.html │ ├── developers.html │ ├── download.html │ ├── faq.html │ ├── features.html │ ├── help.html │ ├── index.html │ ├── jupyter.html │ ├── layout.html │ ├── license.html │ ├── man.html │ ├── manual/ │ │ ├── .gitignore │ │ └── README.txt │ ├── notebook_layout.html │ ├── notebooks/ │ │ ├── .gitignore │ │ └── README.txt │ ├── papers.html │ ├── people.html │ ├── quickstart.html │ ├── static/ │ │ ├── cadabra_in_ipython.nb │ │ ├── cadabra_in_ipython.nb.html │ │ ├── fonts/ │ │ │ ├── Bright/ │ │ │ │ ├── OFL-FAQ.txt │ │ │ │ ├── OFL.txt │ │ │ │ ├── README.txt │ │ │ │ └── cmun-bright.css │ │ │ ├── cmunbx.otf │ │ │ └── cmunrm.otf │ │ ├── humans.txt │ │ ├── images/ │ │ │ └── logo.tex │ │ ├── js/ │ │ │ └── cadabra.js │ │ ├── robots.txt │ │ └── styles/ │ │ ├── cadabra-web.css │ │ └── normalize.css │ ├── tutorials.html │ ├── user_notebooks.html │ └── v1x.html └── scan.py ================================================ FILE CONTENTS ================================================ 
================================================ FILE: .editorconfig ================================================ root = true [*] end_of_line = lf insert_final_newline = true # Matches multiple files with brace expansion notation [*.{cc,hh}] charset = utf-8 indent_style = tab indent_size = 3 trim_trailing_whitespace = true ================================================ FILE: .gitattributes ================================================ *.hh linguist-language=C++ *.cc linguist-language=C++ libs/** linguist-vendored ================================================ FILE: .github/workflows/appimage-modern.yml ================================================ # Modern AppImage build using AppImageBuilder # Supports both x86_64 and arm64 architectures on current GitHub runners # Uses Ubuntu 22.04 base for maximum compatibility name: AppImage (Modern) on: release: types: [created] # Uncomment for testing #push: # branches: [devel] jobs: build: name: AppImage ${{ matrix.arch }} strategy: fail-fast: false matrix: include: - arch: x86_64 os: ubuntu-24.04 ubuntu_arch: amd64 appimage_arch: x86_64 repo: http://archive.ubuntu.com/ubuntu/ - arch: arm64 os: ubuntu-24.04-arm ubuntu_arch: arm64 appimage_arch: aarch64 repo: http://ports.ubuntu.com/ubuntu-ports/ runs-on: ${{ matrix.os }} steps: - name: Checkout repository uses: actions/checkout@v4 - name: Install AppImageBuilder run: | sudo apt update sudo apt install -y python3-pip - name: Clean up any existing AppDir run: | sudo rm -rf AppDir || true - name: Create AppImageBuilder recipe run: | cat > AppImageBuilder.yml << 'EOF' version: 1 script: - mkdir -p AppDir AppDir: path: ./AppDir app_info: id: science.cadabra.cadabra2-gtk name: Cadabra2 icon: cadabra2-gtk version: latest exec: usr/bin/cadabra2-gtk exec_args: $@ apt: arch: ${{ matrix.ubuntu_arch }} sources: - sourceline: deb ${{ matrix.repo }} jammy main universe key_url: 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0x871920D1991BC93C' - sourceline: deb ${{ 
matrix.repo }} jammy-updates main universe key_url: 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0x871920D1991BC93C' include: # Core system libraries - libc6 - libstdc++6 - libgcc-s1 # Shell interpreters - bash - dash - perl-base # Cadabra dependencies - libgmp10 - libgmpxx4ldbl - libboost-system1.74.0 - libboost-filesystem1.74.0 - libboost-program-options1.74.0 - libboost-date-time1.74.0 - libsqlite3-0 - uuid-runtime - libssl3 - libtbb12 # GTK and GUI dependencies - libgtkmm-3.0-1v5 - libgtk-3-0 - libgdk-pixbuf2.0-0 - libcairo2 - libpango-1.0-0 - libpangocairo-1.0-0 - libatk1.0-0 - libglib2.0-0 - librsvg2-common - adwaita-icon-theme - hicolor-icon-theme - libwayland-client0 - libwayland-cursor0 - libwayland-egl1-mesa # Python dependencies (Ubuntu 22.04 uses Python 3.10) - python3.10 - libpython3.10 - python3-pip - python3-gmpy2 exclude: - adwaita-icon-theme-full - humanity-icon-theme - ubuntu-mono files: exclude: - usr/lib/python*/site-packages/pip* - usr/lib/python*/site-packages/setuptools* - usr/share/doc - usr/share/man - usr/share/locale - var/cache - var/lib/apt - etc/apt runtime: env: PATH: '${APPDIR}/usr/bin:${PATH}' PYTHONHOME: '${APPDIR}/usr' PYTHONPATH: '${APPDIR}/usr/lib/python3.10/site-packages:${APPDIR}/usr/lib/python3.10:${APPDIR}/usr/lib/python3.10/dist-packages' LD_LIBRARY_PATH: '${APPDIR}/usr/lib:${APPDIR}/usr/lib/${{ matrix.arch }}-linux-gnu:${LD_LIBRARY_PATH}' LC_ALL: C.UTF-8 LANG: C.UTF-8 AppImage: update-information: gh-releases-zsync|kpeeters|cadabra2|latest|Cadabra*${{ matrix.arch }}.AppImage.zsync sign-key: None arch: ${{ matrix.appimage_arch }} comp: gzip EOF - name: Build Cadabra2 in container run: | # Use Docker to build for the target architecture with Ubuntu 22.04 docker run --rm --privileged \ --platform linux/${{ matrix.arch }} \ -v $PWD:/workspace \ -w /workspace \ ubuntu:22.04 /bin/bash -c " # Stop on error inside the Docker container set -e # Install dependencies apt update DEBIAN_FRONTEND=noninteractive apt install -y \ 
build-essential cmake git \ python3-dev python3-pip g++ \ libgmp3-dev libgtkmm-3.0-dev \ libboost-all-dev libssl-dev \ libsqlite3-dev uuid-dev \ python3-matplotlib python3-sympy \ python3-gmpy2 python3-numpy \ squashfs-tools file desktop-file-utils fakeroot strace patchelf zsync # Install directly from the main branch of the appimage-builder repository, # to deal with the issue reported in # https://github.com/AppImageCrafters/appimage-builder/pull/281 pip3 install git+https://github.com/AppImageCrafters/appimage-builder.git@main # Patch mpmath (bug fixed only in ubuntu-22.04) # sed -i \"s/if other is 0:/if other == 0:/g\" /usr/lib/python3/dist-packages/mpmath/ctx_mp_python.py # cat /usr/lib/python3/dist-packages/mpmath/ctx_mp_python.py # Build Cadabra2 git config --global --add safe.directory /workspace mkdir -p build cd build cmake -DCMAKE_INSTALL_PREFIX=/usr \ -DCMAKE_BUILD_TYPE=Release \ -DENABLE_MATHEMATICA=OFF \ -DAPPIMAGE_MODE=ON \ .. VERSION=\$(cat /workspace/build/VERSION) echo \"Building version \${VERSION}\" make -j\$(nproc) make install DESTDIR=/workspace/build/AppDir make test # Install Python packages directly into the AppDir after 'make install' # Ensure the target path is correct for your AppDir's Python installation pip3 install --target=/workspace/build/AppDir/usr/lib/python3.10/site-packages --upgrade mpmath>=1.2.0 pip3 install --target=/workspace/build/AppDir/usr/lib/python3.10/site-packages astunparse pip3 install --target=/workspace/build/AppDir/usr/lib/python3.10/site-packages pillow sympy matplotlib numpy # Update version in AppImageBuilder.yml sed -i \"s/version: latest/version: \${VERSION}/\" /workspace/AppImageBuilder.yml # Build the AppImage echo \"Now going to run appimage-builder, using:\" echo \"---------\" cat /workspace/AppImageBuilder.yml echo \"---------\" echo \"Here we go...\" appimage-builder --recipe /workspace/AppImageBuilder.yml --skip-test echo \"This is /workspace/build/AppDir/usr/bin/ :\" echo \"---------\" ls -la 
/workspace/build/AppDir/usr/bin/ echo \"---------\" " - name: Rename AppImage run: | VERSION=`cat build/VERSION` sudo chown runner:docker build -R # Find and rename the AppImage ls -la . APPIMAGE_FILE=$(find . -name "*.AppImage" -type f | head -n 1) if [ -n "$APPIMAGE_FILE" ]; then mv "$APPIMAGE_FILE" "Cadabra_${VERSION}_${{ matrix.arch }}.AppImage" chmod +x "Cadabra_${VERSION}_${{ matrix.arch }}.AppImage" fi - name: Upload artifacts uses: actions/upload-artifact@v4 with: name: cadabra2-appimage-${{ matrix.arch }} path: Cadabra_*_${{ matrix.arch }}.AppImage retention-days: 7 - name: Upload to release if: github.event_name == 'release' run: | gh release upload "${{ github.ref_name }}" Cadabra_*_${{ matrix.arch }}.AppImage --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # - name: Generate zsync file # if: github.event_name == 'release' # run: | # # Install zsync # sudo apt install -y zsync # # # Generate zsync file for the AppImage # APPIMAGE_FILE=$(find . -name "Cadabra_*_${{ matrix.arch }}.AppImage" -type f | head -n 1) # if [ -n \"$APPIMAGE_FILE\" ]; then # zsyncmake \"$APPIMAGE_FILE\" # # # Upload zsync file to release # gh release upload ${{ github.ref_name }} *.zsync --clobber || true # fi # env: # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} ================================================ FILE: .github/workflows/c++lib.yml ================================================ # This is a build which gets triggered on every commit push, to # ensure that Cadabra builds as c++lib. Does not yet contain any # tests, it just checks for build issues. 
name: c++lib on: [push] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: get dependencies run: sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install git cmake python3-dev g++ libpcre3 libpcre3-dev libgmp3-dev libboost-all-dev libgmp-dev libsqlite3-dev uuid-dev libmpfr-dev libmpc-dev - name: configure run: mkdir build-lib && cd build-lib && cmake -DBUILD_AS_CPP_LIBRARY=ON .. - name: make run: cd build-lib && make ================================================ FILE: .github/workflows/docker.yml ================================================ # Build docker images with a Jupyter server with Cadabra # kernel whenever a commit is pushed to github. # # Lint this thing with # # yq eval docker.yml name: Docker # on: [push] on: release: types: [released] jobs: build: runs-on: ubuntu-latest # strategy: # fail-fast: false # matrix: # arch: # - amd64 # - arm64 steps: - name: Prepare run: | platform=${{ matrix.platform }} echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV - name: Set up QEMU uses: docker/setup-qemu-action@master - uses: actions/checkout@v3 # - name: Exit if not on master branch # if: github.ref != 'refs/heads/master' # run: exit 1 - name: Log in to Docker Hub uses: docker/login-action@master with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@master with: images: kpeeters/cadabra2-jupyter - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Build Docker image uses: docker/build-push-action@master with: platforms: linux/amd64, linux/arm64 context: . 
file: docker/Dockerfile push: true #${{ github.ref == 'master' }} labels: ${{ steps.meta.outputs.labels }} tags: kpeeters/cadabra2-jupyter:latest # outputs: type=docker # - name: Upload artifact to github assets # uses: actions/upload-artifact@main # with: # name: cadabra2-jupyter-${{ matrix.arch }} # path: cadabra2-jupyter-${{ matrix.arch }}.tar ================================================ FILE: .github/workflows/fedora-40-package.yml ================================================ # Create a Fedora 40 package on a github release event. # This assumes that the cadabra version is the same as the # release name, and it will attempt to add the .rpm file # to the release assets. name: Fedora-40 package # on: [push] on: release: types: [created] jobs: build: strategy: matrix: include: - os: ubuntu-24.04 name: x86_64 display-name: "Fedora 40 x86_64" - os: ubuntu-24.04-arm name: arm64 display-name: "Fedora 40 arm64" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull Fedora image run: docker pull fedora:40 - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Build RPM in Fedora container run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ fedora:40 \ bash -c " git config --global --add safe.directory /workspace dnf install -y rpm-build make gcc-c++ git python3-devel cmake gmp-devel libuuid-devel sqlite-devel openssl-devel gtkmm30-devel boost-devel python3-matplotlib python3-pip pip3 install sympy mkdir build cd build cmake -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. 
make cpack " - name: Set version variables from output of cmake run: | VER=$(cat ${{ github.workspace }}/build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV - name: Upload Release Assets run: | gh release upload "${{ env.VERSION }}" build/cadabra2-${{ env.VERSION }}-fedora40-${{ matrix.name }}.rpm --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} test: needs: build runs-on: ubuntu-24.04 steps: - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull Fedora image run: docker pull fedora:40 - name: Download package and run inside Fedora container run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ fedora:40 \ bash -c " dnf install -y xorg-x11-server-Xvfb wget curl jq export VERSION=\$(curl -s https://api.github.com/repos/kpeeters/cadabra2/releases|& jq .[0].tag_name -r) export RPMNAME=cadabra2-\${VERSION}-fedora40-x86_64.rpm wget https://github.com/kpeeters/cadabra2/releases/download/\${VERSION}/\${RPMNAME} dnf install -y \${RPMNAME} printf 'import sys\nprint(sys.path)\nimport cdb.main\nex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n' > tst.cdb cadabra2 tst.cdb xvfb-run -a cadabra2-gtk & APP_PID=\$! sleep 10 if kill -0 \$APP_PID 2>/dev/null; then echo 'cadabra2-gtk started successfully' kill \$APP_PID exit 0 else echo 'cadabra2-gtk failed to start' exit 1 fi " ================================================ FILE: .github/workflows/fedora-41-package.yml ================================================ # Create a Fedora 41 package on a github release event. # This assumes that the cadabra version is the same as the # release name, and it will attempt to add the .rpm file # to the release assets. 
name: Fedora-41 package # on: [push] on: release: types: [created] jobs: build: strategy: matrix: include: - os: ubuntu-24.04 name: x86_64 display-name: "Fedora 41 x86_64" - os: ubuntu-24.04-arm name: arm64 display-name: "Fedora 41 arm64" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull Fedora image run: docker pull fedora:41 - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Build RPM in Fedora container run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ fedora:41 \ bash -c " git config --global --add safe.directory /workspace dnf install -y rpm-build make gcc-c++ git python3-devel cmake gmp-devel libuuid-devel sqlite-devel openssl-devel gtkmm30-devel boost-devel python3-matplotlib python3-pip pip3 install sympy mkdir build cd build cmake -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. 
make cpack " - name: Set version variables from output of cmake run: | VER=$(cat ${{ github.workspace }}/build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV - name: Upload Release Assets run: | gh release upload "${{ env.VERSION }}" build/cadabra2-${{ env.VERSION }}-fedora41-${{ matrix.name }}.rpm --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} test: needs: build runs-on: ubuntu-24.04 steps: - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull Fedora image run: docker pull fedora:41 - name: Download package and run inside Fedora container run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ fedora:41 \ bash -c " dnf install -y xorg-x11-server-Xvfb wget curl jq export VERSION=\$(curl -s https://api.github.com/repos/kpeeters/cadabra2/releases|& jq .[0].tag_name -r) export RPMNAME=cadabra2-\${VERSION}-fedora41-x86_64.rpm wget https://github.com/kpeeters/cadabra2/releases/download/\${VERSION}/\${RPMNAME} dnf install -y \${RPMNAME} printf 'import sys\nprint(sys.path)\nimport cdb.main\nex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n' > tst.cdb cadabra2 tst.cdb xvfb-run -a cadabra2-gtk & APP_PID=\$! sleep 10 if kill -0 \$APP_PID 2>/dev/null; then echo 'cadabra2-gtk started successfully' kill \$APP_PID exit 0 else echo 'cadabra2-gtk failed to start' exit 1 fi " ================================================ FILE: .github/workflows/fedora-42-package.yml ================================================ # Create a Fedora 42 package on a github release event. # This assumes that the cadabra version is the same as the # release name, and it will attempt to add the .rpm file # to the release assets.
name: Fedora-42 package # on: [push] on: release: types: [created] jobs: build: strategy: matrix: include: - os: ubuntu-24.04 name: x86_64 display-name: "Fedora 42 x86_64" - os: ubuntu-24.04-arm name: arm64 display-name: "Fedora 42 arm64" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull Fedora image run: docker pull fedora:42 - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Build RPM in Fedora container run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ fedora:42 \ bash -c " git config --global --add safe.directory /workspace dnf install -y rpm-build make gcc-c++ git python3-devel cmake gmp-devel libuuid-devel sqlite-devel openssl-devel gtkmm30-devel boost-devel python3-matplotlib python3-pip pip3 install sympy mkdir build cd build cmake -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. 
make cpack " - name: Set version variables from output of cmake run: | VER=$(cat ${{ github.workspace }}/build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV - name: Upload Release Assets if: github.event_name == 'release' run: | gh release upload "${{ env.VERSION }}" build/cadabra2-${{ env.VERSION }}-fedora42-${{ matrix.name }}.rpm --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Store package as build artifact uses: actions/upload-artifact@v4 if: github.event_name == 'push' with: name: cadabra2-fedora42-${{ matrix.name }}.rpm path: build/cadabra2-${{ env.VERSION }}-fedora42-${{ matrix.name }}.rpm test: needs: build runs-on: ubuntu-latest if: github.event_name == 'release' # only test release builds steps: - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull Fedora image run: docker pull fedora:42 - name: Download package and run inside Fedora container run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ fedora:42 \ bash -c " dnf install -y xorg-x11-server-Xvfb wget curl jq export VERSION=\$(curl -s https://api.github.com/repos/kpeeters/cadabra2/releases|& jq .[0].tag_name -r) export RPMNAME=cadabra2-\${VERSION}-fedora42-x86_64.rpm wget https://github.com/kpeeters/cadabra2/releases/download/\${VERSION}/\${RPMNAME} dnf install -y \${RPMNAME} printf 'import sys\nprint(sys.path)\nimport cdb.main\nex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n' > tst.cdb cadabra2 tst.cdb xvfb-run -a cadabra2-gtk & APP_PID=\$! sleep 10 if kill -0 \$APP_PID 2>/dev/null; then echo 'cadabra2-gtk started successfully' kill \$APP_PID exit 0 else echo 'cadabra2-gtk failed to start' exit 1 fi " ================================================ FILE: .github/workflows/freebsd.yml ================================================ # This is a build which gets triggered on every commit push, to # ensure that we get some warnings when we push code that does # not build on Linux. 
name: FreeBSD on: [push] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Build and test in FreeBSD id: test uses: vmactions/freebsd-vm@v1 with: usesh: true prepare: | pkg install -y curl cmake python311 gettext-runtime pkgconf boost-libs pkg install -y fontconfig harfbuzz pcre jsoncpp gmp atkmm cairomm glibmm gtkmm30 pangomm pkg install -y git py311-sympy py311-numpy run: | pwd env freebsd-version sysctl hw.model sysctl hw.ncpu sysctl hw.physmem sysctl hw.usermem mkdir build && cd build && cmake -DENABLE_MATHEMATICA=OFF .. make make install make check ================================================ FILE: .github/workflows/homebrew-devel.yml ================================================ # This is a build which gets triggered on every push to the # 'devel' branch, to update the homebrew-repo the cadabra2-devel # package. name: Homebrew-devel on: push: branches: - devel jobs: build: runs-on: ubuntu-latest steps: - name: Checkout Cadabra uses: actions/checkout@v4 with: ref: 'devel' - name: Checkout Homebrew repo uses: actions/checkout@v4 with: repository: kpeeters/homebrew-repo path: homebrew-repo token: ${{ secrets.ACTIONS_HOMEBREW_REPO_TOKEN }} - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Get dependencies run: sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install git cmake jq python3-dev g++ libpcre3 libpcre3-dev libgmp3-dev libgtkmm-3.0-dev libboost-all-dev libgmp-dev libsqlite3-dev uuid-dev libmpfr-dev libmpc-dev - name: Configure run: mkdir build && cd build && cmake -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. 
- name: Set variables run: | export VERSION=$(cat build/VERSION) export COMMIT=$(curl -s https://api.github.com/repos/kpeeters/cadabra2/commits/devel | jq -r .sha) wget https://github.com/kpeeters/cadabra2/archive/${COMMIT}.tar.gz export SHA=`cat ${COMMIT}.tar.gz | sha256sum -b | cut -d " " -f 1` echo "SHA=${SHA}" >> $GITHUB_ENV echo "VERSION=$VERSION" >> $GITHUB_ENV echo "COMMIT=$COMMIT" >> $GITHUB_ENV - name: Update Homebrew repo run: | cd homebrew-repo cat cadabra2-devel.rb | sed -e 's/^ url .*/ url "https:\/\/github.com\/kpeeters\/cadabra2\/archive\/${{ env.COMMIT }}.tar.gz"/' | sed -e 's/^ sha256.*/ sha256 "${{ env.SHA }}"/' | sed -e 's/^ version.*/ version "${{ env.VERSION }}.${{ env.COMMIT }}"/' > out.rb mv out.rb cadabra2-devel.rb cat cadabra2-devel.rb git config user.name "Kasper Peeters" git config user.email "info@cadabra.science" git add cadabra2-devel.rb git diff-index --quiet HEAD || (git commit -a -m "Update to release ${{ env.VERSION }} commit ${{ env.COMMIT }}." && git push) test: needs: build runs-on: macos-latest steps: - name: get dependencies run: brew update && brew upgrade && brew tap kpeeters/repo && brew install --verbose cadabra2-devel - name: show where cadabra is located run: cat `which cadabra2` - name: test simple cli run run: printf "import sys\nprint(sys.path)\nimport cdb.main\nex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n" > tst.cdb && cadabra2 tst.cdb - name: test gtk app launch run: | cadabra2-gtk & APP_PID=$! 
sleep 10 if kill -0 $APP_PID 2>/dev/null; then echo "cadabra2-gtk started successfully" kill $APP_PID exit 0 else echo "cadabra2-gtk failed to start" exit 1 fi - name: Upload build logs if: always() uses: actions/upload-artifact@v4 with: name: brew-build-logs path: | ~/Library/Logs/Homebrew/ retention-days: 2 ================================================ FILE: .github/workflows/homebrew.yml ================================================ # This is a build which gets triggered on every release, to # update the homebrew-repo. name: Homebrew on: release: types: [released] # on: [push] jobs: build: runs-on: ubuntu-latest steps: - name: Checkout Cadabra uses: actions/checkout@v4 - name: Checkout Homebrew repo uses: actions/checkout@v4 with: repository: kpeeters/homebrew-repo path: homebrew-repo token: ${{ secrets.ACTIONS_HOMEBREW_REPO_TOKEN }} - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Get dependencies run: sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install git cmake jq python3-dev g++ libpcre3 libpcre3-dev libgmp3-dev libgtkmm-3.0-dev libboost-all-dev libgmp-dev libsqlite3-dev uuid-dev libmpfr-dev libmpc-dev - name: Configure run: mkdir build && cd build && cmake -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. 
- name: Set variables run: | export VERSION=$(cat build/VERSION) echo "VERSION=$VERSION" >> $GITHUB_ENV wget https://github.com/kpeeters/cadabra2/archive/refs/tags/${VERSION}.tar.gz export SHA=`cat ${VERSION}.tar.gz | sha256sum -b | cut -d " " -f 1` echo "SHA=${SHA}" >> $GITHUB_ENV - name: Update Homebrew repo run: | cd homebrew-repo cat cadabra2.rb | sed -e 's/^ url .*/ url "https:\/\/github.com\/kpeeters\/cadabra2\/archive\/refs\/tags\/${{ env.VERSION }}.tar.gz"/' | sed -e 's/^ sha256.*/ sha256 "${{ env.SHA }}"/' | sed -e 's/^ version.*/ version "${{ env.VERSION }}"/' > out.rb mv out.rb cadabra2.rb cat cadabra2.rb git config user.name "Kasper Peeters" git config user.email "info@cadabra.science" git add cadabra2.rb git diff-index --quiet HEAD || (git commit -a -m "Update to release ${{ env.VERSION }}." && git push) test: needs: build runs-on: macos-latest steps: - name: get dependencies run: brew update && brew upgrade && brew tap kpeeters/repo && brew install cadabra2 - name: show where cadabra is located run: cat `which cadabra2` - name: test simple cli run run: printf "ex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n" > tst.cdb && cadabra2 tst.cdb - name: test gtk app launch run: | cadabra2-gtk & APP_PID=$! sleep 10 if kill -0 $APP_PID 2>/dev/null; then echo "Application started successfully" kill $APP_PID exit 0 else echo "Application failed to start" exit 1 fi ================================================ FILE: .github/workflows/linux.yml ================================================ # This is a build which gets triggered on every commit push, to # ensure that we get some warnings when we push code that does # not build on Linux. 
name: Linux on: [push] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 # - name: Exit if not on devel branch # if: github.ref != 'refs/heads/devel' # run: exit 1 # - name: setup python # uses: actions/setup-python@v4 # with: # python-version: 3.11 - name: get dependencies run: sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install ninja-build git cmake python3-dev g++ libpcre3 libpcre3-dev libgmp3-dev libgtkmm-3.0-dev libboost-all-dev libgmp-dev libsqlite3-dev uuid-dev libmpfr-dev libmpc-dev libtbb-dev catch2 && python3 --version && which python3 && python3 -m pip install --upgrade pip && python3 -m pip install wheel && python3 -m pip install sympy gmpy2 numpy - name: configure run: mkdir build && cd build && cmake -G Ninja -DENABLE_MATHEMATICA=OFF .. - name: build run: cd build && cmake --build . - name: test env: CTEST_OUTPUT_ON_FAILURE: 1 run: cd build && ctest ARGS="-V" ================================================ FILE: .github/workflows/macos.yml ================================================ # This is a build which gets triggered on every commit push, to # ensure that we get some warnings when we push code that does # not build on macOS. name: macOS on: [push] jobs: build: runs-on: macos-latest steps: - uses: actions/checkout@v4 # - name: Exit if not on devel branch # if: github.ref != 'refs/heads/devel' # run: exit 1 # - name: remove python 3.12 # run: brew uninstall python@3.12 - name: get dependencies run: brew install cmake ninja boost pcre gmp python@3.13 pkgconfig gtkmm3 adwaita-icon-theme catch2 tbb && python3 -m pip install --break-system-packages --user sympy gmpy2 numpy - name: configure run: mkdir build && cd build && cmake -G Ninja -DENABLE_MATHEMATICA=OFF -DPython_EXECUTABLE=python3.12 .. - name: build run: cd build && cmake --build . 
- name: test env: CTEST_OUTPUT_ON_FAILURE: 1 run: cd build && ctest ARGS="-V" ================================================ FILE: .github/workflows/opensuse-tumbleweed-package.yml ================================================ # Create a OpenSUSE Tumbleweed package on a github release event. # This assumes that the cadabra version is the same as the # release name, and it will attempt to add the .rpm file # to the release assets. name: OpenSUSE-Tumbleweed package # on: [push] on: release: types: [created] jobs: build: runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 # - name: Exit if not on devel branch # if: github.ref != 'refs/heads/devel' # run: exit 1 - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull OpenSUSE image run: docker pull opensuse/tumbleweed:latest - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Build RPM run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ opensuse/tumbleweed:latest \ bash -c " zypper refresh zypper update zypper -n install --no-recommends rpmbuild git cmake python313-devel gcc-c++ \ gmp-devel libuuid-devel \ gtkmm3-devel sqlite3-devel \ python313-matplotlib libopenssl-devel \ libboost_system-devel libboost_filesystem-devel \ libboost_date_time-devel libboost_program_options-devel git config --global --add safe.directory /workspace mkdir build cd build cmake -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. 
make cpack " - name: Set version variables from output of cmake run: | VER=$(cat ${{ github.workspace }}/build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV - name: Upload Release Assets run: | gh release upload "${{ env.VERSION }}" build/cadabra2-${{ env.VERSION }}-tumbleweed.rpm --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} test: needs: build runs-on: ubuntu-24.04 steps: - name: Set up Docker Buildx uses: docker/setup-buildx-action@master - name: Pull OpenSUSE image run: docker pull opensuse/tumbleweed:latest - name: Download package and run inside OpenSUSE container run: | docker run --rm \ -v ${{ github.workspace }}:/workspace \ -w /workspace \ opensuse/tumbleweed:latest \ bash -c " zypper clean --all zypper refresh zypper update zypper se -s libboost_filesystem zypper -n install xvfb-run wget curl jq export VERSION=\$(curl -s https://api.github.com/repos/kpeeters/cadabra2/releases|& jq .[0].tag_name -r) export RPMNAME=cadabra2-\${VERSION}-tumbleweed.rpm wget https://github.com/kpeeters/cadabra2/releases/download/\${VERSION}/\${RPMNAME} zypper --no-gpg-checks -n install \${RPMNAME} printf 'import sys\nprint(sys.path)\nimport cdb.main\nex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n' > tst.cdb cadabra2 tst.cdb xvfb-run -a cadabra2-gtk & APP_PID=\$! sleep 10 if kill -0 \$APP_PID 2>/dev/null; then echo 'cadabra2-gtk started successfully' kill \$APP_PID exit 0 else echo 'cadabra2-gtk failed to start' exit 1 fi " ================================================ FILE: .github/workflows/tarball.yml ================================================ # This is a build which gets triggered on every release, to # generate a tarball which includes all submodules. 
name: Tarball on: release: types: [released] # on: [push] jobs: build: runs-on: ubuntu-latest steps: - name: Checkout Cadabra uses: actions/checkout@v4 - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Create tarball run: | export CDB_MAJOR=`cat cmake/version.cmake |grep 'MAJOR '|sed -e 's/[^ ]* \([0-9]*\).*/\1/'` export CDB_MINOR=`cat cmake/version.cmake |grep 'MINOR '|sed -e 's/[^ ]* \([0-9]*\).*/\1/'` export CDB_PATCH=`cat cmake/version.cmake |grep 'PATCH '|sed -e 's/[^ ]* \([0-9]*\).*/\1/'` export VERSION=${CDB_MAJOR}.${CDB_MINOR}.${CDB_PATCH} echo "VERSION=$VERSION" >> $GITHUB_ENV mkdir -p submodules cd submodules git clone -b kpeeters/cadabra https://github.com/kpeeters/MicroTeX.git microtex cd ../.. mv cadabra2 cadabra2-${VERSION} tar zcf cadabra2-${VERSION}-source-inclusive.tar.gz --exclude ".git" cadabra2-${VERSION} mv cadabra2-${VERSION} cadabra2 - name: Upload tarball to assets run: | gh release upload "${{ env.VERSION }}" ${{ github.workspace }}/../cadabra2-${VERSION}-source-inclusive.tar.gz --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} ================================================ FILE: .github/workflows/ubuntu-22.04-package.yml ================================================ # Create an Ubuntu 22.04 package on a github release event. # This assumes that the cadabra version is the same as the # release name, and it will attempt to add the .deb file # to the release assets. 
name: Ubuntu-22.04 package # on: [push] on: release: types: [created] jobs: build: strategy: matrix: include: - os: ubuntu-22.04 name: x86_64 display-name: "Ubuntu 22.04 x86_64" - os: ubuntu-22.04-arm name: arm64 display-name: "Ubuntu 22.04 arm64" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 # - name: Exit if not on devel branch # if: github.ref != 'refs/heads/devel' # run: exit 1 - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Get dependencies run: sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install git cmake ninja-build python3-dev g++ libpcre3 libpcre3-dev libgmp3-dev libgtkmm-3.0-dev libboost-all-dev libgmp-dev libsqlite3-dev uuid-dev libmpfr-dev libmpc-dev libtbb-dev catch2 && python3 --version && which python3 && python3 -m pip install --upgrade pip && python3 -m pip install wheel && python3 -m pip install sympy gmpy2 numpy - name: Configure run: mkdir build && cd build && cmake -G Ninja -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. - name: Build run: cd build && cmake --build . 
- name: Create the .deb package run: cd build && cpack - name: Set version variables from output of cmake run: | VER=$(cat build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV - name: Upload Release Assets run: | gh release upload "${{ env.VERSION }}" build/cadabra2-${{ env.VERSION }}-ubuntu-22.04-jammy-${{ matrix.name }}.deb --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} test: needs: build runs-on: ubuntu-22.04 steps: - name: Get and install package run: | sudo apt install xvfb export VERSION=$(curl -s https://api.github.com/repos/kpeeters/cadabra2/releases|& jq .[0].tag_name -r) export DEBNAME=cadabra2-${VERSION}-ubuntu-22.04-jammy-x86_64.deb wget https://github.com/kpeeters/cadabra2/releases/download/${VERSION}/${DEBNAME} sudo apt -y update sudo apt -y upgrade sudo apt -y install ./${DEBNAME} - name: Test simple cli run run: printf "import sys\nprint(sys.path)\nimport cdb.main\nex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n" > tst.cdb && cadabra2 tst.cdb - name: Test gtk app launch run: | xvfb-run -a cadabra2-gtk & APP_PID=$! sleep 10 if kill -0 $APP_PID 2>/dev/null; then echo "Application started successfully" kill $APP_PID exit 0 else echo "Application failed to start" exit 1 fi ================================================ FILE: .github/workflows/ubuntu-24.04-package.yml ================================================ # Create an Ubuntu 24.04 package on a github release event. # This assumes that the cadabra version is the same as the # release name, and it will attempt to add the .deb file # to the release assets. 
name: Ubuntu-24.04 package # on: [push] on: release: types: [created] jobs: build: strategy: matrix: include: - os: ubuntu-24.04 name: x86_64 display-name: "Ubuntu 24.04 x86_64" - os: ubuntu-24.04-arm name: arm64 display-name: "Ubuntu 24.04 arm64" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 # - name: Exit if not on devel branch # if: github.ref != 'refs/heads/devel' # run: exit 1 - name: Set up GitHub CLI run: | sudo apt-get update sudo apt-get install -y gh - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Get dependencies run: sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install git cmake ninja-build python3-dev g++ libpcre3 libpcre3-dev libgmp3-dev libgtkmm-3.0-dev libboost-all-dev libgmp-dev libsqlite3-dev uuid-dev libmpfr-dev libmpc-dev libtbb-dev catch2 && python3 --version && which python3 && python3 -m pip install --upgrade pip && python3 -m pip install wheel && python3 -m pip install sympy gmpy2 numpy - name: configure run: mkdir build && cd build && cmake -G Ninja -DPACKAGING_MODE=ON -DENABLE_MATHEMATICA=OFF -DCMAKE_INSTALL_PREFIX=/usr .. - name: Make run: cd build && cmake --build . 
- name: Create the .deb package run: cd build && cpack - name: Set version variables from output of cmake run: | VER=$(cat build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV - name: Upload Release Assets if: github.event_name == 'release' run: | gh release upload "${{ env.VERSION }}" build/cadabra2-${{ env.VERSION }}-ubuntu-24.04-noble-${{ matrix.name }}.deb --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Store package as build artifact uses: actions/upload-artifact@v4 if: github.event_name == 'push' with: name: cadabra2-ubuntu-24.04-noble-${{ matrix.name }}.deb path: build/cadabra2-${{ env.VERSION }}-ubuntu-24.04-noble-${{ matrix.name }}.deb test: needs: build runs-on: ubuntu-24.04 steps: - name: Install prerequisites for testing run: | sudo apt install xvfb imagemagick sudo apt-mark hold firefox - name: Get and install package (release mode) if: github.event_name == 'release' run: | export VERSION=$(curl -s https://api.github.com/repos/kpeeters/cadabra2/releases|& jq .[0].tag_name -r) export DEBNAME=cadabra2-${VERSION}-ubuntu-24.04-noble-x86_64.deb wget https://github.com/kpeeters/cadabra2/releases/download/${VERSION}/${DEBNAME} sudo apt -y update sudo apt -y upgrade sudo apt -y install ./${DEBNAME} - name: Download package (push mode) uses: actions/download-artifact@v4 if: github.event_name == 'push' with: name: cadabra2-ubuntu-24.04-noble-x86_64.deb - name: Install package (push mode) if: github.event_name == 'push' run: | ls -la sudo apt -y update sudo apt -y upgrade sudo apt -y install ./cadabra2*.deb - name: Test simple cli run run: printf "import sys\nprint(sys.path)\nimport cdb.main\nex:=(A+B) (C+D);\ndistribute(ex);\nquit()\n" > tst.cdb && cadabra2 tst.cdb - name: Test gtk app launch and take screenshot run: | MESA_LOADER_DRIVER_OVERRIDE=llvmpipe LIBGL_ALWAYS_SOFTWARE=1 xvfb-run -a -s "-screen 0 1920x1080x24 -ac" cadabra2-gtk -n & APP_PID=$!
sleep 10 if kill -0 $APP_PID 2>/dev/null; then echo "Application started successfully" DISPLAY=:99 import -window root screenshot.png kill $APP_PID exit 0 else echo "Application failed to start" exit 1 fi - name: Upload screenshot uses: actions/upload-artifact@v4 with: name: app-screenshot-ubuntu-24.04 path: screenshot.png ================================================ FILE: .github/workflows/windows-installer.yml ================================================ # Create Windows installer on a github release event. This assumes # that the cadabra version is the same as the release name, and it # will attempt to add the installer file to the release assets. name: Windows 11 installer # on: [push] on: release: types: [created] defaults: run: shell: msys2 {0} jobs: msys2-ucrt64: strategy: matrix: include: - os: windows-2022 name: x86_64 display-name: "Windows 11 x86_64" msystem: ucrt64 midfix: ucrt-x86_64 - os: windows-11-arm name: arm64 msystem: clangarm64 midfix: clang-aarch64 display-name: "Windows 11 arm64" runs-on: ${{ matrix.os }} name: Windows 11 defaults: run: shell: msys2 {0} steps: - name: Install WiX v4/v5 Tool run: | dotnet tool install --tool-path c:\WiX\bin wix --version 5.0.2 c:\WiX\bin\wix extension add --global WixToolset.UI.wixext/5.0.2 shell: pwsh - name: Add WiX toolkit to PATH shell: bash run: | echo "WiX installed at ${WIX}, hopefully..." 
echo "Listing C:/WiX/bin" ls "C:/WiX/bin" echo "Listing Program Files (x86):" ls "C:/Program Files (x86)/" echo "Listing Program Files:" ls "C:/Program Files/" echo "${WIX}bin" >> $GITHUB_PATH - uses: msys2/setup-msys2@v2 with: msystem: ${{ matrix.msystem }} update: true install: >- curl git mingw-w64-${{ matrix.midfix }}-gcc mingw-w64-${{ matrix.midfix }}-python mingw-w64-${{ matrix.midfix }}-gtkmm3 mingw-w64-${{ matrix.midfix }}-boost mingw-w64-${{ matrix.midfix }}-sqlite3 mingw-w64-${{ matrix.midfix }}-cmake mingw-w64-${{ matrix.midfix }}-python-matplotlib mingw-w64-${{ matrix.midfix }}-python-sympy mingw-w64-${{ matrix.midfix }}-github-cli - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: actions/checkout@master name: Checkout source - name: Build run: | mkdir build cd build # Turn off searching Python in the registry, as that contains the # windows Python which is not the MSYS python that we want to use. cmake -DPython_FIND_REGISTRY=NEVER .. ninja ninja install cpack - name: Upload WiX log file if: always() uses: actions/upload-artifact@v4 with: name: wix-${{ matrix.name }}.log path: build/_CPack_Packages/win64/WIX/wix.log retention-days: 2 - name: Set version variables from output of cmake run: | VER=$(cat build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV GITVER=$(cat build/GIT_TAG_VERSION) echo "GIT_TAG_VERSION=$GITVER" >> $GITHUB_ENV - name: Upload release assets run: | gh release upload "${{ env.GIT_TAG_VERSION }}" build/cadabra2-${{ env.VERSION }}-win64.msi --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} ================================================ FILE: .github/workflows/windows.yml ================================================ # Create Windows installer on a github push or release event # (upload to the release assets in the latter case). 
name: Windows 11 on: [push] #on: # release: # types: [created] defaults: run: shell: msys2 {0} jobs: msys2-ucrt64: strategy: matrix: include: - os: windows-2022 name: x86_64 display-name: "Windows 11 x86_64" msystem: ucrt64 midfix: ucrt-x86_64 - os: windows-11-arm name: arm64 msystem: clangarm64 midfix: clang-aarch64 display-name: "Windows 11 arm64" runs-on: ${{ matrix.os }} name: Windows build defaults: run: shell: msys2 {0} steps: - name: Install WiX v4/v5 Tool run: | dotnet tool install --tool-path c:\WiX\bin wix --version 5.0.2 c:\WiX\bin\wix extension add --global WixToolset.UI.wixext/5.0.2 shell: pwsh - name: Add WiX toolkit to PATH shell: bash run: | echo "WiX installed at ${WIX}, hopefully..." echo "Listing C:/WiX/bin" ls "C:/WiX/bin" echo "Listing Program Files (x86):" ls "C:/Program Files (x86)/" echo "Listing Program Files:" ls "C:/Program Files/" echo "${WIX}bin" >> $GITHUB_PATH - uses: msys2/setup-msys2@v2 with: msystem: ${{ matrix.msystem }} update: true install: >- curl git mingw-w64-${{ matrix.midfix }}-gcc mingw-w64-${{ matrix.midfix }}-python mingw-w64-${{ matrix.midfix }}-gtkmm3 mingw-w64-${{ matrix.midfix }}-boost mingw-w64-${{ matrix.midfix }}-sqlite3 mingw-w64-${{ matrix.midfix }}-cmake mingw-w64-${{ matrix.midfix }}-python-matplotlib mingw-w64-${{ matrix.midfix }}-python-sympy mingw-w64-${{ matrix.midfix }}-github-cli - name: Authenticate GitHub CLI run: gh auth setup-git env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: actions/checkout@master name: Checkout source - name: Build run: | mkdir build cd build # Turn off searching Python in the registry, as that contains the # windows Python which is not the MSYS python that we want to use. cmake -DPython_FIND_REGISTRY=NEVER .. 
ninja ninja install cpack - name: Upload cmake_install.cmake file if: always() uses: actions/upload-artifact@v4 with: name: cmake_install-${{ matrix.name }}.cmake path: build/cmake_install.cmake retention-days: 2 - name: Upload frontend/cmake_install.cmake file if: always() uses: actions/upload-artifact@v4 with: name: frontend-cmake_install-${{ matrix.name }}.cmake path: build/frontend/cmake_install.cmake retention-days: 2 - name: Upload frontend/gtkmm/cmake_install.cmake file if: always() uses: actions/upload-artifact@v4 with: name: frontend-gtkmm-cmake_install-${{ matrix.name }}.cmake path: build/frontend/gtkmm/cmake_install.cmake retention-days: 2 - name: Upload WiX log file if: always() uses: actions/upload-artifact@v4 with: name: wix-${{ matrix.name }}.log path: build/_CPack_Packages/win64/WIX/wix.log retention-days: 2 - name: Set version variables from output of cmake run: | VER=$(cat build/VERSION) echo "VERSION=$VER" >> $GITHUB_ENV GITVER=$(cat build/GIT_TAG_VERSION) echo "GIT_TAG_VERSION=$GITVER" >> $GITHUB_ENV - name: Upload installer as build artifact uses: actions/upload-artifact@v4 with: name: cadabra2-windows-${{ matrix.name }}.msi path: build/cadabra2-${{ env.VERSION }}-win64.msi retention-days: 2 - name: Upload release assets if: github.event_name == 'release' run: | mv build/cadabra2-${{ env.VERSION }}-win64.msi build/cadabra2-${{ env.GIT_TAG_VERSION }}-windows-${{ matrix.name }}.msi gh release upload "${{ env.GIT_TAG_VERSION }}" build/cadabra2-${{ env.GIT_TAG_VERSION }}-windows-${{ matrix.name }}.msi --clobber env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} test: needs: msys2-ucrt64 strategy: fail-fast: false matrix: include: - os: windows-2022 name: x86_64 display-name: "Windows 11 x86_64" - os: windows-11-arm name: arm64 display-name: "Windows 11 arm64" runs-on: ${{ matrix.os }} steps: - name: Download installer uses: actions/download-artifact@v4 with: name: cadabra2-windows-${{ matrix.name }}.msi - name: Install silently shell: pwsh run: | dir 
$installer = Get-ChildItem -Filter "cadabra*.msi" | Select-Object -First 1 -ExpandProperty Name if (-not $installer) { Write-Error "No installer found" exit 1 } $installerPath = (Get-Item -Path ".\$installer").FullName Write-Host "Installing $installerPath" Start-Process -FilePath "msiexec" -ArgumentList "/i", "$installerPath", "/qn", "/l*v", "install.log" -Wait Write-Host "Installer finished" - name: Upload installation log uses: actions/upload-artifact@v4 with: name: install-${{ matrix.os }}.log path: install.log - name: Check installation location shell: cmd run: | echo "Main folder:" dir "C:\Program Files\Cadabra" - name: Start GUI and take screenshot shell: pwsh run: | # Attempt to start the notebook if (-not (Test-Path "C:\Program Files\Cadabra\cadabra2-gtk.exe")) { Write-Host "Executable not found" exit 1 } $process = Start-Process "C:\Program Files\Cadabra\cadabra2-gtk.exe" -ArgumentList "-n", "Examples\schwarzschild.cnb" -WorkingDirectory "C:\Program Files\Cadabra" -PassThru Start-Sleep -Seconds 120 # Take screenshot Add-Type -AssemblyName System.Windows.Forms Add-Type -AssemblyName System.Drawing $screen = [System.Windows.Forms.Screen]::PrimaryScreen.Bounds $bitmap = New-Object System.Drawing.Bitmap($screen.Width, $screen.Height) $graphics = [System.Drawing.Graphics]::FromImage($bitmap) $graphics.CopyFromScreen($screen.Location, [System.Drawing.Point]::Empty, $screen.Size) $bitmap.Save("screenshot.png") $graphics.Dispose() $bitmap.Dispose() # Cleanup Stop-Process -Id $process.Id -Force - name: Upload cadabra_log.txt if: always() uses: actions/upload-artifact@v4 with: name: cadabra_log-${{ matrix.os }}.txt path: C:\Windows\Temp\cadabra_log.txt - name: Upload screenshot uses: actions/upload-artifact@v4 with: name: app-screenshot-${{ matrix.os }} path: screenshot.png ================================================ FILE: .gitignore ================================================ .cache web2/cadabra2/source/book *.orig *.bak doxygen .DS_Store *~ *.o build 
build-lib jbuild install .ipynb_checkpoints __pycache__ .vscode .vs CMakeSettings.json config/pre_install.rtf .ccls-cache *appdata.xml config/install_script.iss config/science.cadabra.cadabra2-gtk.desktop ================================================ FILE: .gitmodules ================================================ [submodule "submodules/microtex"] path = submodules/microtex url = https://github.com/kpeeters/MicroTeX branch = kpeeters/cadabra ================================================ FILE: .travis.yml ================================================ branches: only: - master - feature/pybind os: - linux addons: apt: sources: - ubuntu-toolchain-r-test packages: - g++-6 env: - MATRIX_EVAL="CC=gcc-6 && CXX=g++-6" - osx compiler: - gcc language: cpp sudo: required dist: trusty env: global: - CTEST_EXT_COLOR_OUTPUT=TRUE - CTEST_BUILD_FLAGS=-j4 matrix: exclude: - os: osx compiler: gcc script: - cmake --build . - ctest -VV . before_script: - mkdir build - cd build - cmake .. before_install: - if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then openssl aes-256-cbc -K $encrypted_0c0fd3a7dbd0_key -iv $encrypted_0c0fd3a7dbd0_iv -in config/travisci_rsa.enc -out config/travisci_rsa -d; fi - if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then chmod 0600 config/travisci_rsa; cp config/travisci_rsa ~/.ssh/id_rsa; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get -qq update; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install -y cmake python3-dev g++ libpcre3 libpcre3-dev libgmp3-dev uuid-dev python3-pip; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install -y libgtkmm-3.0-dev libjsoncpp-dev libboost-regex-dev libboost-system-dev libboost-program-options-dev libboost-date-time-dev libboost-filesystem-dev libgmp-dev libsqlite3-dev; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; brew unlink json-c; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew outdated cmake || brew upgrade cmake; fi - if [[ "$TRAVIS_OS_NAME" == "osx" 
]]; then brew outdated boost || travis_wait brew upgrade boost; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew outdated gmp || brew upgrade gmp; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew outdated pkgconfig || brew upgrade pkgconfig; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install jsoncpp python3 gtkmm3 gnome-icon-theme; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip3 install matplotlib sympy; fi - if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then if [[ "`${CC} --version |grep gcc`" != "" ]]; then sudo apt-get install --yes doxygen graphviz texlive-font-utils; ./config/publish-doxygen; fi; fi; fi install: - pip3 install --user sympy notifications: email: recipients: - kasper.peeters@phi-sci.com on_success: always on_failure: always ================================================ FILE: CITATION.cff ================================================ cff-version: 1.2.0 message: "If you use this software, please cite it as below." authors: - family-names: "Peeters" given-names: "Kasper" orcid: "https://orcid.org/0000-0002-3077-8193" title: "Cadabra2" version: 2.0.4 doi: 10.5281/zenodo.2500762 date-released: 2017-12-18 url: "https://github.com/kpeeters/cadabra2" preferred-citation: type: article authors: - family-names: "Peeters" given-names: "Kasper" orcid: "https://orcid.org/0000-0002-3077-8193" doi: 10.21105/joss.01118 journal: "The Journal of Open Source Software" start: 1118 # First page number end: 1119 # Last page number title: "Cadabra2: computer algebra for field theory revisited" issue: 32 volume: 3 year: 2018 ================================================ FILE: CMakeLists.txt ================================================ set(CMAKE_LEGACY_CYGWIN_WIN32 0) if(WIN32) # We want to use WiX 4.x but cpack only supports that from 3.30. set(CADABRA_CMAKE_VERSION 3.30) else() set(CADABRA_CMAKE_VERSION 3.12) endif() # Policy settings for CMake to resolve ambiguities.
if(POLICY CMP0042) cmake_policy(SET CMP0042 NEW) message(STATUS "Set CMake policy CMP0042 to NEW") endif() if(POLICY CMP0054) cmake_policy(SET CMP0054 NEW) message(STATUS "Set CMake policy CMP0054 to NEW") endif() if(POLICY CMP0127) cmake_policy(SET CMP0127 NEW) message(STATUS "Set CMake policy CMP0127 to NEW") endif() if(POLICY CMP0148) cmake_policy(SET CMP0148 NEW) message(STATUS "Set CMake policy CMP0148 to NEW") endif() if(POLICY CMP0167) cmake_policy(SET CMP0167 NEW) message(STATUS "Set CMake policy CMP0167 to NEW") endif() if(POLICY CMP0094) cmake_policy(SET CMP0094 NEW) message(STATUS "Set CMake policy CMP0094 to NEW (use first Python found)") endif() if(POLICY CMP0169) cmake_policy(SET CMP0169 NEW) message(STATUS "Set CMake policy CMP0169 to NEW (use new FetchContent)") # CMake 3.30: call FetchContent_Populate() with just the name of a # dependency. This modern alternative was introduced in cmake 3.14 # but we still support 3.12; we call the old behaviour in # frontend/gtkmm/CMakeLists.txt endif() if(POLICY CMP0177) cmake_policy(SET CMP0177 NEW) message(STATUS "Set CMake policy CMP0177 to NEW (destination paths are normalised)") endif() if(POLICY CMP0087) cmake_policy(SET CMP0087 NEW) message(STATUS "Set CMake policy CMP0087 to NEW (evaluate generator expressions in install(CODE))") endif() cmake_minimum_required(VERSION ${CADABRA_CMAKE_VERSION}) set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD_REQUIRED ON) project(Cadabra) if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin") set(MACOS TRUE) endif() #--------------------------------------------------------------------------- # Preamble #--------------------------------------------------------------------------- set(CMAKE_EXPORT_COMPILE_COMMANDS ON) # Aliases for directories set(CADABRA_ROOT_DIR ${CMAKE_SOURCE_DIR}) set(CADABRA_CLIENT_SERVER_DIR ${CADABRA_ROOT_DIR}/client_server) set(CADABRA_CORE_DIR ${CADABRA_ROOT_DIR}/core) set(CADABRA_FRONTEND_DIR ${CADABRA_ROOT_DIR}/frontend) set(CADABRA_IMAGES_DIR
${CADABRA_ROOT_DIR}/images) set(CADABRA_LIBS_DIR ${CADABRA_ROOT_DIR}/libs) include(cmake/functions.cmake) # Include Visual Studio specific build commands if (MSVC) # https://developercommunity.visualstudio.com/content/problem/618088/cmake-msvc-toolset-version-is-incorrect-in-visual.html if ((MSVC_VERSION EQUAL 1921 OR MSVC_VERSION EQUAL 1922) AND MSVC_TOOLSET_VERSION EQUAL 141) set(MSVC_TOOLSET_VERSION 142) endif() message(STATUS "MSVC_VERSION = ${MSVC_VERSION}, MSVC_TOOLSET_VERSION = ${MSVC_TOOLSET_VERSION}") include(cmake/windows.cmake) endif() # Make sure the build type is non-empty. if(NOT DEFINED CMAKE_BUILD_TYPE OR CMAKE_BUILD_TYPE STREQUAL "") set(CMAKE_BUILD_TYPE "Release") endif() set(CADABRA_BUILD_TYPE "${CMAKE_BUILD_TYPE}") if (CMAKE_BUILD_TYPE MATCHES "^Debug$") set(CADABRA_DEBUG_BUILD TRUE) endif() message(STATUS "Build type = ${CMAKE_BUILD_TYPE}") # Set path to additional cmake files set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake/modules") if (APPLE) set(ENV{PKG_CONFIG_PATH} "/usr/local/opt/libffi/lib/pkgconfig:") endif() set(PKG_CONFIG_USE_STATIC_LIBS OFF) if(NOT APPLE) find_package(TBB QUIET) if(TBB_FOUND) message(STATUS "TBB found, will make some operations parallel") else() message(STATUS "TBB not found, parallel operations disabled") endif() else() message(STATUS "Apple's toolchain not yet supporting TBB properly, disabled") endif() # Get version information. 
include(cmake/version.cmake) print_header("Building Cadabra version ${CADABRA_VERSION_SEM} (${SYSTEM_BITS}-bit)") message(STATUS "Build id '${CADABRA_VERSION_BUILD}' dated ${CADABRA_VERSION_DATE}") message(STATUS "Build mode is set to '${CMAKE_BUILD_TYPE}'") string(TOLOWER ${CMAKE_SYSTEM_PROCESSOR} STANDARD_ARCH_NAME) if(STANDARD_ARCH_NAME STREQUAL "aarch64") set(STANDARD_ARCH_NAME "arm64") endif() if(STANDARD_ARCH_NAME STREQUAL "amd64") set(STANDARD_ARCH_NAME "x86_64") endif() message(STATUS "Architecture is '${CMAKE_SYSTEM_PROCESSOR}' (package names will use '${STANDARD_ARCH_NAME}')") if(WIN32) if("${STANDARD_ARCH_NAME}" STREQUAL "x86_64") set(MSYS_ENV "ucrt64") set(WIX_SHORT_ARCH "x64") else() set(MSYS_ENV "clangarm64") set(WIX_SHORT_ARCH "arm64") endif() message(STATUS "MSYS environment set to ${MSYS_ENV}") endif() # Store the version number in a build/VERSION file (so that e.g. github # actions can pick it up). file(WRITE build/VERSION "${CADABRA_VERSION_SEM}") file(WRITE build/GIT_TAG_VERSION "${CADABRA_VERSION_GITHUB_TAG}") # Notify about install directory if ("${CMAKE_INSTALL_PREFIX}" STREQUAL "") message(STATUS "Install directory not set") else() message(STATUS "Install directory set to ${CMAKE_INSTALL_PREFIX}") endif() # Turn Mathematica support on/off. option(ENABLE_MATHEMATICA "Enable Mathematica support" OFF) # Are we trying to build cadabra as a c++ library? option(BUILD_AS_CPP_LIBRARY "Build cadabra as a C++ library" OFF) if (BUILD_AS_CPP_LIBRARY) enable_testing() add_subdirectory(c++lib) configure_file( "${PROJECT_SOURCE_DIR}/core/Config.hh.in" "${PROJECT_SOURCE_DIR}/core/Config.hh" ) # Bail out early. return() endif() # Switch between GTK4 and GTK3. option(USE_GTK4 "Build for GTK4 (instead of GTK3)" OFF) # Include packaging logic. 
include(cmake/packaging.cmake) #--------------------------------------------------------------------------- # User options and other notifications #--------------------------------------------------------------------------- # option(MSVC_TARGET_CONSOLE "Force Release book on MSVC to display a console window" OFF) option(APPIMAGE_MODE "Run in AppImage mode, overriding path settings" OFF) if(APPIMAGE_MODE) message(STATUS "Building for AppImage packaging (Debian paths, MicroTeX)") if("${CMAKE_INSTALL_PREFIX}" STREQUAL "/usr") else() MESSAGE(FATAL_ERROR "Building with -DAPPIMAGE_MODE=ON also requires -DCMAKE_INSTALL_PREFIX=/usr") endif() endif() option(PACKAGING_MODE "Run in packaging mode, overriding path settings" OFF) if (PACKAGING_MODE) message(STATUS "Building in packaging mode") if("${CMAKE_INSTALL_PREFIX}" STREQUAL "/usr") else() MESSAGE(FATAL_ERROR "Building with -DPACKAGING_MODE=ON also requires -DCMAKE_INSTALL_PREFIX=/usr") endif() else() message(STATUS "Building in user mode") endif() option(ENABLE_JUPYTER "Enable building the Xeus-based Jupyter kernel" OFF) option(ENABLE_PY_JUPYTER "Enable building the default Jupyter kernel" ON) if(ENABLE_JUPYTER) # Currently only possible when building against Conda. set(CONDA_FOUND TRUE) else() set(CONDA_FOUND FALSE) endif() option(BUILD_TESTS "Build tests" ON) if (BUILD_TESTS) message(STATUS "Building tests") # Allows tests to be built in all subdirectories. enable_testing() endif() option(ENABLE_FRONTEND "Enable the UI frontend" ON) option(ENABLE_SYSTEM_JSONCPP "Use the system-provided jsoncpp library" OFF) option(INSTALL_TARGETS_ONLY "Only install targets; skipping icons, shared libraries etc..." OFF) if (INSTALL_TARGETS_ONLY) message(STATUS "INSTALL_TARGETS_ONLY is enabled, please make sure all auxillary files and programs Cadabra requires are already installed") endif() #--------------------------------------------------------------------------- # Compiler flags. 
#--------------------------------------------------------------------------- # - Set the C++ standard to C++17 # - Turn optimizations on # - Turn off warnings we don't need include(CheckIPOSupported) check_ipo_supported(RESULT IPO_SUPPORTED OUTPUT error) if( IPO_SUPPORTED ) message(STATUS "IPO / LTO enabled") else() message(STATUS "IPO / LTO not supported: <${error}>") endif() # GCC if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX) if (ENABLE_FRONTEND) if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.9) message(FATAL_ERROR "GCC version must be at least 4.9 for regex support! See http://askubuntu.com/questions/428198/getting-installing-gcc-g-4-9-on-ubuntu and then set the environment variables CXX to g++-4.9 and CC to gcc-4.9. You may have to erase the build directory before re-running cmake.") endif() if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 14.0) message(STATUS "This version of g++ (${CMAKE_CXX_COMPILER_VERSION}) incorrectly warns about possibly uninitialised memory when using std::variant containing a std::shared_ptr. 
Disabling this warning.") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-maybe-uninitialized") endif() endif() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O2 -Wall -Wextra -Wunused -Wno-psabi -Wno-unknown-pragmas -Wno-misleading-indentation -fvisibility=hidden -Wno-unused-but-set-variable -Wno-unused-parameter") endif() # Clang if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") # For Clang, need to additionally check version to avoid compiler bugs if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 3.5) message(FATAL_ERROR "Clang version must be at least 3.5 to avoid known bugs.") endif() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O2 -fvisibility=hidden -Wall -Wextra -Wunused -Wno-unused-parameter -Wno-null-pointer-subtraction") endif() # Visual Studio if(MSVC) set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) # Disable specific warnings set(MSVC_FLAGS "/wd4250" # inherits via dominance (rampant in the GTKMM codebase) "/wd4101" # unreferenced local variable "/wd4244" # conversion from x to y, possible loss of data "/wd4267" # same as 4244 "/wd4305" # truncation from '' to 'char' "/wd4309" # truncation of constant value "/wd4390" # empty control statement, due to a DEBUG macro which requires trailing ; "/wd4996" # deprecated POSIX functions "-D_CRT_SECURE_NO_WARNINGS" # don't warn about deprecated functions "-D_SCL_SECURE_NO_WARNINGS" # don't warn about unsafe function calls (e.g. std::copy with raw pointers) "-DNOMINMAX" # prevent windows headers from defining min and max macros "-DWIN32_LEAN_AND_MEAN" # stop windows from including a bunch of garbage "-DBOOST_ALL_DYN_LINK" # ensure boost's auto-linking is enabled ) foreach(FLAG ${MSVC_FLAGS}) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}") endforeach() endif() #--------------------------------------------------------------------------- # Configure the various parts of Cadabra. 
#--------------------------------------------------------------------------- # if(MATHEMATICA_FOUND) # # To avoid issues finding Mathematica's libWSTP64i4, # # when linking to Mathematica we set the RPATH. # # That's not something we want to do in general, as e.g. Debian's # # packages are not supposed to set RPATH. # SET(CMAKE_SKIP_BUILD_RPATH FALSE) # SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) # SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib") # SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) # endif() configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/config/postinst.in" "${CMAKE_CURRENT_BINARY_DIR}/postinst" @ONLY ) configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/config/install_script.iss.in" "${CMAKE_CURRENT_SOURCE_DIR}/config/install_script.iss" ) configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/config/pre_install.rtf.in" "${CMAKE_CURRENT_SOURCE_DIR}/config/pre_install.rtf" ) configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/config/science.cadabra.cadabra2-gtk.desktop.in" "${CMAKE_CURRENT_SOURCE_DIR}/config/science.cadabra.cadabra2-gtk.desktop" ) configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/frontend/gtkmm/science.cadabra.cadabra2-gtk.appdata.xml.in" "${CMAKE_CURRENT_SOURCE_DIR}/frontend/gtkmm/science.cadabra.cadabra2-gtk.appdata.xml" ) #--------------------------------------------------------------------------- # Configure Mathematica (if enabled). #--------------------------------------------------------------------------- if(ENABLE_MATHEMATICA) print_header("Configuring Mathematica") cmake_policy(SET CMP0077 NEW) set(Mathematica_USE_STATIC_LIBRARIES TRUE) find_package(Mathematica COMPONENTS WSTP) endif() #--------------------------------------------------------------------------- # Configure Python. 
#---------------------------------------------------------------------------

print_header("Configuring Python")

include(GNUInstallDirs)

set(Python_POSTFIX "3")
find_package(Python REQUIRED COMPONENTS Interpreter Development)
# Cache these for sub-projects (e.g. pybind11) which still look at the
# old-style PYTHON_* variable names.
set(PYTHON_EXECUTABLE ${Python_EXECUTABLE} CACHE INTERNAL "")
set(PYBIND11_PYTHON_VERSION ${Python_VERSION} CACHE INTERNAL "")

# Prefer a system-supplied pybind11, but fall back to the bundled copy in
# libs/pybind11 when it is absent or older than the minimum we require.
find_package(pybind11 CONFIG)
if (NOT pybind11_FOUND OR pybind11_VERSION VERSION_LESS 2.13.6)
   if(pybind11_FOUND)
      message(STATUS "Found pybind11 with version ${pybind11_VERSION} < 2.13.6, using included pybind11 instead.")
   else()
      message(STATUS "System-supplied pybind11 not found, using included pybind11.")
   endif()
   add_subdirectory(libs/pybind11)
endif()

message(STATUS "Found python ${Python_LIBRARIES}")
message(STATUS "Python version is ${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}.")

# The PYTHON_SITE_PATH variable is used for installation purposes
# only. It is ideally a path relative to CMAKE_INSTALL_PREFIX, not an
# absolute path which uses this variable explicitly. See the CMake docs
# for `install`.

if(WIN32)
   set(PYTHON_SITE_PATH lib/python${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}/site-packages)
   # NOTE: if you change the CDB_BIN_PATH, you also need to change the stripping
   # logic in InstallPrefix.cc. Note that CDB_BIN_PATH needs to be a relative path.
   set(CDB_BIN_PATH .)
else()
   set(CDB_BIN_PATH bin)
   if(PACKAGING_MODE AND IS_DEBIAN_PACKAGE)
      # Debian packages install all their Python things in 'dist-packages', not 'site-packages'.
      set(PYTHON_SITE_PATH lib/python${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}/dist-packages)
      # set(Python_SITE_DIST "dist-packages")
   elseif(APPIMAGE_MODE)
      set(PYTHON_SITE_PATH lib/python${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}/dist-packages)
      set(Python_SITE_DIST "dist-packages")
   else()
      # The builder can override the path by setting it externally.
      if(NOT DEFINED PYTHON_SITE_PATH)
         # set(PYTHON_SITE_PATH ${Python_SITEARCH})
         # For everyone else there is 'site-packages' which we get from
         # calling into python's 'site' package (and hoping that the 0th
         # element is where we should be writing).
         execute_process(
            COMMAND ${Python_EXECUTABLE} -c "import site; print(site.getsitepackages()[0])"
            OUTPUT_VARIABLE PYTHON_SITE_PATH
            OUTPUT_STRIP_TRAILING_WHITESPACE
            )
      endif()
   endif()
endif()

if(IS_ABSOLUTE ${PYTHON_SITE_PATH})
   # CPack on windows complains if `install` commands contain absolute paths,
   # so we do our best to make PYTHON_SITE_PATH relative to CMAKE_INSTALL_PREFIX.
   # Of course, the net effect will be the same, as `install` with a relative
   # DESTINATION will prepend CMAKE_INSTALL_PREFIX.
   message(STATUS "Making PYTHON_SITE_PATH relative if possible")
   string(REPLACE "${CMAKE_INSTALL_PREFIX}/" "" PYTHON_SITE_PATH_REL "${PYTHON_SITE_PATH}")
   set(PYTHON_SITE_PATH ${PYTHON_SITE_PATH_REL})
else()
   message(STATUS "PYTHON_SITE_PATH is already relative")
endif()
message(STATUS "PYTHON_SITE_PATH = ${PYTHON_SITE_PATH}")

# Report the final install locations. PYTHON_SITE_PATH can still be absolute
# here if the string(REPLACE ...) above could not strip the install prefix.
if(IS_ABSOLUTE ${PYTHON_SITE_PATH})
   message(STATUS "Installing Cadabra Python module in ${PYTHON_SITE_PATH}")
   message(STATUS "Installing Cadabra packages in ${PYTHON_SITE_PATH}/cdb/")
else()
   message(STATUS "Installing Cadabra Python module in ${CMAKE_INSTALL_PREFIX}/${PYTHON_SITE_PATH}")
   message(STATUS "Installing Cadabra packages in ${CMAKE_INSTALL_PREFIX}/${PYTHON_SITE_PATH}/cdb/")
endif()
message(STATUS "Installing binaries in ${CMAKE_INSTALL_PREFIX}/bin/")
message(STATUS "Installing manual pages in ${CMAKE_INSTALL_PREFIX}/share/man/")
message(STATUS "Installing fonts/icons in ${CMAKE_INSTALL_PREFIX}/share/cadabra2/")

if("${Python_CDB_EXECUTABLE}" STREQUAL "")
   # We start the cadabra2 python script by using the current environment,
   # so that e.g. Fedora 42 does not hard-code the python path as a
   # dependency. However, on macOS with Homebrew, we need to be able
   # to override this because otherwise we will not be running in the
   # venv which homebrew created for our package. See cadabra2.rb and
   # cadabra2-devel.rb where this is used.
   set(Python_CDB_EXECUTABLE "/usr/bin/env python3")
endif()
message(STATUS "Starting cadabra2 using '${Python_CDB_EXECUTABLE}'")

message(STATUS "For reference:")
message(STATUS " Python executable (Python_EXECUTABLE) ${Python_EXECUTABLE}")
message(STATUS " Python standard platform-independent installation directory (Python_STDLIB) ${Python_STDLIB}")
message(STATUS " Python standard platform-dependent installation directory (Python_STDARCH) ${Python_STDARCH}")
message(STATUS " Python 3rd-party platform-independent installation directory (Python_SITELIB) ${Python_SITELIB}")
message(STATUS " Python 3rd-party platform-dependent installation directory (Python_SITEARCH) ${Python_SITEARCH}")

# We need to give our Python module an abi-name extension
# so that it can be installed in a folder which does not
# contain the abi name. See
# https://www.python.org/dev/peps/pep-3149/
execute_process(
   COMMAND ${Python_EXECUTABLE} -c "import sysconfig; print(sysconfig.get_config_var('SOABI'))"
   OUTPUT_VARIABLE Python_SOABI
   OUTPUT_STRIP_TRAILING_WHITESPACE
   )
message(STATUS "Python abi name ${Python_SOABI}")

# Suffixes for static/shared libraries and the Python extension module,
# per platform.
if(WIN32)
   set(STATIC_LIB_SUFFIX "lib")
   set(SHARED_LIB_SUFFIX "dll")
   set(Python_MOD_SUFFIX "pyd")
   set(CMAKE_FIND_LIBRARY_PREFIXES "lib" ${CMAKE_FIND_LIBRARY_PREFIXES})
   set(CMAKE_SHARED_LIBRARY_SUFFIX ".dll")
   set(CMAKE_FIND_LIBRARY_SUFFIXES ".dll.a")
else()
   set(STATIC_LIB_SUFFIX "a")
   set(SHARED_LIB_SUFFIX "so")
   set(Python_MOD_EXT "so")
   set(Python_MOD_SUFFIX "${Python_SOABI}.so")
endif()
message(STATUS "Python module extension ${Python_MOD_SUFFIX}")

#---------------------------------------------------------------------------
# Add subdirectories to project.
#--------------------------------------------------------------------------- print_header("Build tools and options") # Mimalloc makes the cadabra2 module crash when run # as jupyter kernel, so disable for now. #find_package(mimalloc 2.0 QUIET) if(mimalloc_FOUND) message(STATUS "Using mimalloc allocator") else() message(STATUS "Using glibc allocator") endif() find_package(Catch2 3 QUIET) # This is optional; we will not run the C++ tests if Catch2 is not found if(Catch2_FOUND) message(STATUS "Will run C++ tests with Catch2") include(Catch) else() message(STATUS "Catch2 not found, will not run C++ tests") endif() # Jupyter kernel print_header("Configuring Jupyter kernel") if(ENABLE_JUPYTER) message(STATUS "Building the Xeus-based Jupyter kernel") # Currently only possible when building against Conda. set(CONDA_FOUND TRUE) else() set(CONDA_FOUND FALSE) if(ENABLE_PY_JUPYTER) message(STATUS "Building the default Jupyter kernel") else() message(STATUS "Not building a Jupyter kernel") endif() endif() if(ENABLE_PY_JUPYTER) add_subdirectory(jupyterkernel) endif() # Core/packages add_subdirectory(client_server) add_subdirectory(core) # Frontend if(ENABLE_FRONTEND) set(ENABLE_MICROTEX TRUE) if(ENABLE_MICROTEX) set(USE_MICROTEX TRUE) set(tinyxml2_BUILD_TESTING FALSE) endif() add_subdirectory(frontend) endif() # Tests if(BUILD_TESTS) add_subdirectory(tests) endif() add_subdirectory(web2 EXCLUDE_FROM_ALL) # Generate the core/Config.hh file; this needs to come as late as possible # in this CMakeLists.txt to ensure that all variables have been set. configure_file( "${PROJECT_SOURCE_DIR}/core/Config.hh.in" "${PROJECT_SOURCE_DIR}/core/Config.hh" ) # Some additional logic to install all runtime dependencies of our binaries # into the target installation directory on windows. 
if(WIN32)
   # Binaries whose runtime DLL dependencies need to be collected and shipped
   # with the Windows installer.
   set(EXECUTABLES
      core/cadabra2-cli
      core/cdb-nbtool
      client_server/cadabra-server
      frontend/gtkmm/cadabra2-gtk
      )
   list(JOIN EXECUTABLES " " LEXECUTABLES)

   # Custom command to run ldd, get dependencies, and install these in a
   # folder ready to be processed by `install`. The 'ldd' command does not
   # run on the '*.pyd' file, but if we rename or copy it to have extension '.dll'
   # all goes through fine...
   # The sed/grep pipeline strips 'not found' entries and Windows system DLLs,
   # keeping only the resolved MSYS-provided dependency paths.
   set(LDDSTR "ldd /${MSYS_ENV}/lib/gdk-pixbuf-2.0/2.10.0/loaders/pixbufloader_svg.dll core/cadabra2.dll ${LEXECUTABLES} | sed -e '/not found/d' -e '/Windows/d' -e '/System32/d' -e '/SysWOW64/d' | grep '=>' | sed -e 's/^[^=]*=>[ ]*\\([^ ]*\\).*/\\1/' | sort | uniq > ${CMAKE_BINARY_DIR}/ldd_dependencies.txt")
   message(STATUS "Determining dependencies using command ${LDDSTR}")
   add_custom_command(
      OUTPUT dummy1
      COMMAND cp core/cadabra2.pyd core/cadabra2.dll
      COMMAND ${CMAKE_COMMAND} -E env bash -c "${LDDSTR}"
      COMMAND touch dummy1
      VERBATIM
      DEPENDS core/cadabra2.pyd ${EXECUTABLES}
      COMMENT "Using ldd to determine dependencies of ${EXECUTABLES}"
      )
   # Copy every DLL listed by the ldd run above into ${CMAKE_SOURCE_DIR}/deps,
   # from where the `install(DIRECTORY ...)` below picks them up.
   add_custom_command(
      OUTPUT dummy2
      DEPENDS dummy1
      COMMAND ${CMAKE_COMMAND} -E env bash -c "cat ${CMAKE_BINARY_DIR}/ldd_dependencies.txt && mkdir -p ${CMAKE_SOURCE_DIR}/deps && for f in `cat ${CMAKE_BINARY_DIR}/ldd_dependencies.txt`; do cp \${f} ${CMAKE_SOURCE_DIR}/deps/; done"
      COMMAND touch dummy2
      VERBATIM
      COMMENT "Copying dependencies into ${CMAKE_SOURCE_DIR}/deps"
      )
   add_custom_target("do_deps" ALL DEPENDS dummy2)

   # Install all the dependencies into the root destination folder.
   install(DIRECTORY deps/ DESTINATION . FILES_MATCHING PATTERN "*.dll")

   # And install a few more which, for reasons unknown, are not reported by ldd.
   # NOTE(review): the `ls` below only prints the MSYS bin directory at install
   # time; it looks like a debugging aid.
   install(CODE "execute_process(COMMAND ls \"/${MSYS_ENV}/bin\") " )
   winstall(FILES /${MSYS_ENV}/bin/libcharset-1.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/librsvg-2-2.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libxml2-2.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/liblzma-5.dll DESTINATION .)
   # FIXME: these can be found by running ldd on numpy and matplotlib dlls.
   winstall(FILES /${MSYS_ENV}/bin/libopenblas.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libgomp-1.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libgfortran-5.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libquadmath-0.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libsharpyuv-0.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libjpeg-8.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libtiff-6.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libdeflate.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libjbig-0.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libLerc.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libwebp-7.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libzstd.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libimagequant.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libopenjp2-7.dll DESTINATION .)
   # These are necessary only on ARM64.
   winstall(FILES /${MSYS_ENV}/bin/libgmp-10.dll DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/libgmpxx-4.dll DESTINATION .)
   # We need gdbus to setup the dbus, needed by Glib, otherwise anything
   # gtk-related will just bail out at start. We also need the helper
   # program to spawn programs using Glib.
   winstall(FILES /${MSYS_ENV}/bin/gdbus.exe DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/gspawn-win64-helper.exe DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/gspawn-win64-helper-console.exe DESTINATION .)
   winstall(FILES /${MSYS_ENV}/bin/gdk-pixbuf-query-loaders.exe DESTINATION .)
endif()

#---------------------------------------------------------------------------
# Provide uninstall target.
#--------------------------------------------------------------------------- configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/cmake/cmake_uninstall.cmake" IMMEDIATE @ONLY ) add_custom_target(uninstall "${CMAKE_COMMAND}" -P "${CMAKE_CURRENT_BINARY_DIR}/cmake/cmake_uninstall.cmake" ) #--------------------------------------------------------------------------- # Provide target to build AppImage. #--------------------------------------------------------------------------- # if(APPIMAGE_MODE) # add_custom_target(appimage # COMMAND mkdir -p AppDir/${Python_SITELIB} # COMMAND cp -a ${CMAKE_SOURCE_DIR}/config/AppRun AppDir/ # COMMAND chmod gou+x ${CMAKE_SOURCE_DIR}/config/AppRun AppDir/AppRun # COMMAND cp -a ${Python_SITELIB}/setuptools AppDir/${Python_SITELIB}/ # COMMAND cp -a ${Python_STDARCH}/* AppDir/${Python_STDARCH}/ # ) # endif() if(WIN32) add_custom_target(windows-installer COMMAND cpack # COMMAND osslsigncode sign -pkcs12 "/mnt/c/path/to/certificate.p12" -pass "certificate password" -n "Cadabra2" -i "https://cadabra.science" -t "http://timestamp.comodoca.com/authenticode" -in "cadabra2-${CADABRA_VERSION_SEM}-win64.exe" -out "cadabra2-${CADABRA_VERSION_SEM}-win64-installer.exe" COMMAND gh auth setup-git COMMAND release upload "${CADABRA_VERSION_SEM}" cadabra2-${CADABRA_VERSION_SEM}-win64.msi --clobber ) endif() print_header("All scripts completed") ================================================ FILE: CODE_OF_CONDUCT.md ================================================ # Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and 
orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at kasper.peeters@phi-sci.com. 
The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] [homepage]: http://contributor-covenant.org [version]: http://contributor-covenant.org/version/1/4/ ================================================ FILE: CONTRIBUTING.md ================================================ How to contribute ================= If you want to help out with Cadabra, or think you can contribute a useful add-on package or perhaps just a sample notebook or a bit of documentation, you are more than welcome! Even just dropping us a note with some details about what computations you do with Cadabra is a useful contribution, and helps to get a better idea of what people expect from the software. Below are some tips on how to get started contributing to Cadabra. For any questions, please post in the [questions and answers][1] forum or get in touch directly via [email][2] Get familiar with the software ------------------------------ The first thing to do is to get familiar with how the software works, by playing with the [tutorials][3] Details of the software, including some of the logic that is behind its inner workings, are described in the [reference guide][4], and documentation is available separately for all [properties and algorithms][5]. If you get stuck, do not hesitate to post a question on the [questions and answers][1] site. 
If you want to contribute at the level of the C++ core, or if you are simply
interested in how things work behind the scenes, you will find the
[doxygen][6] documentation useful.

Identify something to work on
-----------------------------

Most likely you will have used (or will have tried to use) Cadabra for a
concrete project, and found that you see room for improvement. This can be as
simple as reporting a bug, perhaps even fixing it, or adding a tutorial or
other documentation, providing add-on functionality in the form of packages,
or helping with the core. We are also always interested to hear about issues
installing Cadabra on new systems.

Set up your development environment
-----------------------------------

Contributions in any form are welcome, but if you want to submit substantial
code contributions, it is useful to get familiar with the git version control
system. This is used to track software changes over time and to effectively
manage contributions from different authors. We also utilise [github][7], a
web interface to git, extensively and use it for communication, issue
tracking, merging patches (pull requests) and so on. Check out the source
code from there and follow the instructions on how to build it.

Code conventions
----------------

To generate debug output, we use the [dbg][8] facility. To turn this on for a
particular source file, comment out the `DBG_MACRO_DISABLE` definition at the
top. You will then get nicely formatted debug output while running.

Code is formatted using tabs for indentation, K&R style braces, with some
fine-tuning. All is taken care of by `make format` in the top-level
directory, which runs through the entire source tree and does a reformat
using `astyle`.

Documentation
-------------

We have tutorials, manual pages and reference documentation. In addition,
there are user-contributed notebooks. In this sense, we try to follow
[divio][9]. Contributions to all of these parts of the documentation are
welcome.
[1] https://cadabra.science/qa/ [2] mailto:info@cadabra.science [3] https://cadabra.science/tutorials.html [4] https://cadabra.science/help.html [5] https://cadabra.science/man.html [6] https://cadabra.science/doxygen/html/ [7] https://github.com/kpeeters/cadabra2 [8] https://github.com/sharkdp/dbg-macro [9] https://www.divio.com/blog/documentation/ ================================================ FILE: JUPYTER.rst ================================================ Building the Cadabra Jupyter kernel =================================== The Cadabra build scripts can now build a Jupyter kernel, so that you can use the Jupyter notebook to write Cadabra code (using all of the Cadabra notation, i.e. without having to resort to the much more ugly Python interface). At the moment this is only supported by compiling against a Conda python, simply because that enables us to build on the 'xeus' library more easily. Building a Conda package ------------------------ After installation, first activate your miniconda distribution:: source ~/miniconda3/bin/activate All dependencies to build a Conda package of Cadabra can then be installed from Conda directly, with:: conda install conda-build Then build with:: cd conda conda-build . To install:: conda install --use-local cadabra2 Building using Conda (old) -------------------------- The following instructions have been tested on a clean Ubuntu 18.04 installation. The Cadabra Jupyter kernel uses the Xeus library, which is most easily obtained by getting it from Conda. If you do not have Conda yet, get a minimal installation (MiniConda) from https://docs.conda.io/en/latest/miniconda.html (install a Python3.x version). When building against Conda, Cadabra will build only the Python module and the cadabra-jupyter-kernel binary. It is not possible to build many of the other parts of Cadabra using Conda, for various reasons: Conda's glibmm is not built with C++11 enabled, there is no gtkmm library, and probably others. 
For a discussion on this, see https://groups.google.com/a/continuum.io/d/msg/anaconda/oHtExJU9oiM/oMZLGpn1CAAJ and if you don't think this is a problem, see e.g. https://unix.stackexchange.com/questions/414904/anaconda-qt-vs-system-qt After installation, first activate your miniconda distribution:: source ~/miniconda3/bin/activate All dependencies for Cadabra's Jupyter kernel can then be installed from Conda directly, with:: conda install cmake pkg-config glibmm zeromq cppzmq xtl cryptopp \ sqlite util-linux xeus nlohmann_json sympy \ jupyter -c conda-forge Now it is time to do the Cadabra build. Configure with options which ensure that CMake picks up the Conda libraries first, and make it install the Cadabra things in a place which does not interfere with any 'normal' build you may have sitting around:: cd cadabra2 mkdir build cd build cmake -DENABLE_JUPYTER=ON -DENABLE_FRONTEND=OFF \ -DCMAKE_INCLUDE_PATH=${HOME}/miniconda3/include \ -DCMAKE_LIBRARY_PATH=${HOME}/miniconda3/lib \ -DCMAKE_INSTALL_PREFIX=${HOME}/miniconda3 \ .. You should see that it has configured using the Conda Python; look for the `Configuring Python` block, which should be something like:: ------------------------------------------- Configuring Python ------------------------------------------- -- Building for use with Python 3 (good!) -- Found PythonInterp: /home/kasper/miniconda3/bin/python3.7 (found version "3.7.1") -- Found PythonLibs: /home/kasper/miniconda3/lib/libpython3.7m.so -- pybind11 v2.3.dev0 -- Found python /home/kasper/miniconda3/lib/libpython3.7m.so Note the reference to the Conda installation path. 
Further down you should then also see a block for the Jupyter kernel:: ------------------------------------------- Configuring Jupyter kernel build ------------------------------------------- If that's all ok, you can build with the standard:: make sudo make install This will install the kernel in:: ${HOME}/miniconda3/bin/ and the JSON configuration files in:: ${HOME}/miniconda3/share/jupyter/kernels/cadabra/ If you now start Jupyter, you should be able to choose a Cadabra kernel:: ${HOME}/miniconda3/bin/jupyter notebook There is a sample `schwarzschild.ipynb` in the `examples` directory. Setting up a Jupyterhub server for Cadabra ------------------------------------------ The following instructions setup a JupyterHub installation using 'The Littlest JupyterHub' (TLJH). These instructions have been tested on a clean Ubuntu 18.04 installation. First install TLJH as per the instructions at:: https://the-littlest-jupyterhub.readthedocs.io/en/latest/install/custom-server.html (note that you *first* need to do a sudo command, otherwise the installer will ask for the password but you won't see that prompt, making it look like the installation process hangs). *New*: it should now also be possible to simply install the conda package for the Jupyter kernel. *New* Become root (you cannot write into `/opt/tljh` otherwise) and set the conda path using:: sudo su export PATH=/opt/tljh/user/bin:${PATH} Install the prerequisites with:: conda install cmake pkg-config glibmm zeromq cppzmq xtl cryptopp \ sqlite util-linux xeus nlohmann_json sympy \ -c conda-forge Build the Cadabra Jupyter kernel with:: cd cadabra2 mkdir build cd build cmake -DENABLE_JUPYTER=ON -DENABLE_FRONTEND=OFF \ -DCMAKE_INCLUDE_PATH=/opt/tljh/user/include \ -DCMAKE_LIBRARY_PATH=/opt/tljh/user/lib \ -DCMAKE_INSTALL_PREFIX=/opt/tljh/user/ \ .. make install The 'new' button in the Jupyterhub file browser should now offer you the option of creating a new Cadabra notebook. 
Creating a Conda package of the Cadabra Jupyter kernel ------------------------------------------------------ To build a Conda package of the Cadabra Jupyter kernel from scratch, first install miniconda as above, and activate:: source ~/miniconda3/bin/activate Now the fun starts. Conda is an absolutely horrendous packaging system, which absolutely does *not* get dependencies right, but we will have to live with it. First, update the base conda distribution:: conda update -n base -c defaults conda conda update --all Then activate the `conda-forge` channel, and update to the latest of everything:: conda config --add channels conda-forge conda update --all Do *not* use `conda config --set channel_priority strict` as that *will* break the build with an endless list of package conflicts. There are other ways to add the conda-forge channel, all subtly different; avoid adding `-c conda-forge` as that is just broken beyond belief too. Now install the prerequisites for building conda packages:: conda install conda-build anaconda-client \ xeus pkg-config glibmm That last line should not have been necessary, as build requirements in `meta.yaml` should have taken care of it, but alas, it does not work that way. It spits out various messages about packages being *downgraded*; don't ask, I told you the system was broken. Now change to the `conda` directory and build the package:: cd cadabra2/conda export PKG_CONFIG_PATH=${HOME}/miniconda3/lib/pkgconfig conda build . Again, that path setting should have been handled automatically... To upload:: anaconda login anaconda upload /path/to/conda-package.tar.bz2 ================================================ FILE: LICENSE ================================================ GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. 
Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. 
For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. 
The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. 
You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. 
d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. 
A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. 
If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. {one line to give the program's name and a brief idea of what it does.} Copyright (C) {year} {name of author} This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: {project} Copyright (C) {year} {fullname} This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 
This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/philosophy/why-not-lgpl.html>. ================================================ FILE: Makefile ================================================ all: @echo -n "\nTo build Cadabra, \n\n mkdir build\n cd build\n cmake ..\n make\n\nThe other targets here are (for maintainer purposes only)\n\n tarball: build a tarball cadabra2-latest.tar.gz of current HEAD\n doc: generate doxygen docs in doc\n webup: build web pages/tutorials/man pages and upload to server\n updatesnoop: sync snoop repo\n packages: create deb/rpm packages on buildbot\n\nIf you need help, email info@cadabra.science\n\n" .PHONY: doc tarball findclay webbuild claybuild webup format packages updatesnoop tarball: git archive --format=tar --prefix=cadabra2-latest/ HEAD | gzip > ${HOME}/tmp/cadabra2-latest.tar.gz doc: doxygen config/Doxyfile webbuild: cd build; make -f web2/Makefile claybuild: cd web2/cadabra2/source; rm -Rf build; clay build CMD_NOT_FOUND = $(error $(1) is required for this rule) CHECK_CMD = $(if $(shell command -v $(1)),,$(call CMD_NOT_FOUND,$(1))) findclay: $(call CHECK_CMD, clay) webup: findclay webbuild claybuild 
doxygen config/Doxyfile rsync -avz --chmod=+rx doxygen/ cadabra_web:/var/www/cadabra2/doxygen/ cd web2/cadabra2/source; rsync -avz --chmod=+rx build/ cadabra_web:/var/www/cadabra2/; rsync -avz --chmod=+rx static/styles/ cadabra_web:/var/www/cadabra2/static/styles; scp static/cadabra_in* cadabra_web:/var/www/cadabra2/static/; rsync -avz --chmod=+rx static/fonts/ cadabra_web:/var/www/cadabra2/static/fonts; rsync -avz --chmod=+rx static/images/ cadabra_web:/var/www/cadabra2/static/images/; rsync -avz --chmod=+rx static/icons/ cadabra_web:/var/www/cadabra2/static/icons/; rsync -avz --chmod=+rx static/pdf/ cadabra_web:/var/www/cadabra2/static/pdf/; rsync -avz --chmod=+rx static/js/ cadabra_web:/var/www/cadabra2/static/js/; rsync -avz --chmod=+r static/robots.txt cadabra_web:/var/www/cadabra2 format: astyle --style=k/r --indent=tab=3 --recursive --attach-classes --attach-namespaces --indent-classes --indent-namespaces --indent-switches --break-closing-braces '*.hh' astyle --style=k/r --indent=tab=3 --recursive --attach-classes --attach-namespaces --indent-classes --indent-namespaces --indent-switches --break-closing-braces '*.cc' find . -name "*.cc" -exec sed -i -e 's/^\([ \t]*\)\([\{\}]\)/\1\t\2/' '{}' ';' find . -name "*.hh" -exec sed -i -e 's/^\([ \t]*\)\([\{\}]\)/\1\t\2/' '{}' ';' packages: bash config/buildbot.sh appimage: (mkdir build-appimage; cd build-appimage; cmake -DAPPIMAGE_MODE=ON -DCMAKE_INSTALL_PREFIX=/usr ..; make; make install DESTDIR=AppDir; make appimage) updatesnoop: cp ../snoop/src/Snoop.cc ../snoop/src/SnoopPrivate.hh ../snoop/src/Snoop.hh client_server/ ================================================ FILE: README.rst ================================================ Cadabra ======= .. image:: https://joss.theoj.org/papers/10.21105/joss.01118/status.svg :target: https://doi.org/10.21105/joss.01118 .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.2500762.svg :target: https://doi.org/10.5281/zenodo.2500762 .. 
image:: https://github.com/kpeeters/cadabra2/workflows/Linux/badge.svg :target: https://github.com/kpeeters/cadabra2/actions?query=workflow%3ALinux .. image:: https://github.com/kpeeters/cadabra2/workflows/macOS/badge.svg :target: https://github.com/kpeeters/cadabra2/actions?query=workflow%3AmacOS .. image:: https://github.com/kpeeters/cadabra2/workflows/Docker/badge.svg :target: https://github.com/kpeeters/cadabra2/actions?query=workflow%3ADocker .. image:: https://github.com/kpeeters/cadabra2/workflows/Windows%2011/badge.svg :target: https://github.com/kpeeters/cadabra2/actions?query=workflow%3AWindows%2011 .. image:: https://github.com/kpeeters/cadabra2/workflows/FreeBSD/badge.svg :target: https://github.com/kpeeters/cadabra2/actions?query=workflow%3AFreeBSD *A field-theory motivated approach to computer algebra.* Kasper Peeters - End-user documentation at https://cadabra.science/ - Source code documentation at https://cadabra.science/doxygen/html This repository holds the 2.x series of the Cadabra computer algebra system. It supersedes the 1.x series, which can still be found at https://github.com/kpeeters/cadabra. Cadabra is a symbolic computer algebra system, designed specifically for the solution of problems encountered in quantum and classical field theory. It has extensive functionality for tensor computer algebra, tensor polynomial simplification including multi-term symmetries, fermions and anti-commuting variables, Clifford algebras and Fierz transformations, implicit coordinate dependence, multiple index types and many more. The input format is a subset of TeX. Both a command-line and a graphical interface are available, and there is a kernel for Jupyter. Installation ------------- Cadabra builds on Linux, macOS, OpenBSD, FreeBSD and Windows. Select your system from the list below for detailed instructions. 
- `Linux (Debian/Ubuntu/Mint)`_ - `Linux (Fedora 24 and later)`_ - `Linux (CentOS/Scientific Linux)`_ - `Linux (openSUSE)`_ - `Linux (Arch/Manjaro)`_ - `Linux (Solus)`_ - `OpenBSD`_ - `FreeBSD`_ - `macOS`_ - `Windows`_ Binaries for most of these platforms are provided from the download page at https://cadabra.science/download.html, which links to https://github.com/kpeeters/cadabra2/releases/latest. These binaries are automatically generated on every release. See `Building Cadabra as C++ library`_ for instructions on how to build the entire Cadabra functionality as a library which you can use in a C++ program. See `Building a Jupyter kernel`_ for information on the Jupyter kernel for Cadabra sessions. See `Notes on Python paths`_ for some remarks on where Cadabra installs its Python modules and how this plays with various types of Python installations. Linux (Debian/Ubuntu/Mint) ~~~~~~~~~~~~~~~~~~~~~~~~~~ On Debian/Ubuntu you can install all that is needed with:: sudo apt install git cmake libpython3-dev python3-dev g++ libgmp3-dev \ libgtkmm-3.0-dev libboost-all-dev libssl-dev libgmp-dev libsqlite3-dev uuid-dev \ python3-matplotlib python3-mpmath python3-sympy python3-gmpy2 (on Ubuntu 14.04 you need to replace `cmake` with `cmake3` and also install g++-4.9; get in touch if you don't know how to do this). On older systems you may want to install `sympy` using `sudo pip3 install sympy`, but that is discouraged in general. This is the development platform and issues are typically first fixed here. You can use either g++ or the clang++ compiler to build. You need to clone the cadabra2 git repository (if you download the .zip file you will not have all data necessary to build). So first do:: git clone https://github.com/kpeeters/cadabra2 Building is then done with the standard:: cd cadabra2 mkdir build cd build cmake .. make sudo make install This will produce the command line app ``cadabra2`` and the Gtk notebook interface ``cadabra2-gtk``. 
You can also find the latter in the 'Education' menu. Linux (Fedora 24 and later) ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Fedora 24 is the first Fedora to have Python 3; you can build Cadabra using Python 2 but you are strongly encouraged to upgrade. The Fedora platform receives less testing so please get in touch if you run into any issues. You can use either g++ or the clang++ compiler. Install the dependencies with:: sudo dnf install git python3-devel make cmake gcc-c++ \ gmp-devel libuuid-devel sqlite-devel \ gtkmm30-devel boost-devel \ python3-matplotlib \ python3-pip sudo pip3 install sympy You need to clone the cadabra2 git repository (if you download the .zip file you will not have all data necessary to build). So first do:: git clone https://github.com/kpeeters/cadabra2 Building is then done with the standard:: cd cadabra2 mkdir build cd build cmake .. make sudo make install This will produce the command line app ``cadabra2`` and the Gtk notebook interface ``cadabra2-gtk``. You can also find the latter when searching for the 'Cadabra' app from the 'Activities' menu. Linux (CentOS/Scientific Linux) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ On CentOS/Scientific Linux you need to activate The Software Collections (SCL) and Extra Packages for Enterprise Linux (EPEL) to get access to a modern C++ compiler, Python3 and all required build tools. On *CentOS* first do:: sudo yum install centos-release-scl epel-release On *Scientific Linux* the equivalent is:: sudo yum install yum-conf-softwarecollections epel-release Now install all build dependencies with:: sudo yum install devtoolset-7 rh-python36 cmake3 \ gmp-devel libuuid-devel sqlite-devel \ gtkmm30-devel boost-devel git \ python-matplotlib You need to enable the Python3 and C++ compiler which you just installed with:: scl enable rh-python36 bash scl enable devtoolset-7 bash (note: do *not* use sudo here!). 
You also need to install sympy by hand:: sudo pip3 install sympy Now you need to clone the cadabra2 git repository (if you download the .zip file you will not have all data necessary to build):: git clone https://github.com/kpeeters/cadabra2 Building is then done with the standard:: cd cadabra2 mkdir build cd build cmake3 .. make sudo make install This will produce the command line app ``cadabra2`` and the Gtk notebook interface ``cadabra2-gtk``. You can also find the latter in the 'Education' menu. Linux (openSUSE) ~~~~~~~~~~~~~~~~ For openSUSE (tested on 'Leap 15.2', probably also fine with minor changes for 'Tumbleweed') you first need to install the dependencies with:: sudo zypper install --no-recommends git cmake python3-devel gcc-c++ \ gmp-devel libuuid-devel sqlite-devel \ gtkmm3-devel \ python3-matplotlib \ python3-sympy \ libboost_system1_71_0-devel libboost_filesystem1_71_0-devel \ libboost_date_time1_71_0-devel libboost_program_options1_71_0-devel This platform receives less testing so please get in touch if you run into any issues. You need to clone the cadabra2 git repository (if you download the .zip file you will not have all data necessary to build). So first do:: git clone https://github.com/kpeeters/cadabra2 Building is then done with the standard:: cd cadabra2 mkdir build cd build cmake .. make sudo make install This will produce the command line app ``cadabra2`` and the Gtk notebook interface ``cadabra2-gtk``. Linux (Arch/Manjaro) ~~~~~~~~~~~~~~~~~~~~ The package for Arch Linux is cadabra2 https://aur.archlinux.org/packages/cadabra2/ Building and installing (including dependencies) can be accomplished with:: yay -S cadabra2 Alternatively use ``makepkg``:: git clone https://aur.archlinux.org/cadabra2.git cd cadabra2 makepkg -si Please consult the Arch Wiki https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages for more information regarding installing packages from the AUR. 
Linux (Solus) ~~~~~~~~~~~~~ Support for Solus Linux is experimental. To build from source on Solus Linux, first install the dependencies by doing:: sudo eopkg install -c system.devel sudo eopkg install libboost-devel gmp-devel libgtkmm-3-devel sudo eopkg install sqlite3-devel python3-devel sudo eopkg install git cmake make g++ Then configure and build with:: cd cadabra2 mkdir build cd build cmake .. -DCMAKE_INSTALL_PREFIX=/usr make sudo make install This installs below ``/usr`` (instead of ``/usr/local`` on other platforms) because I could not figure out how to make it pick up libraries there. Any feedback on these instructions is welcome. OpenBSD ~~~~~~~ Install the dependencies with:: pkg_add git cmake boost python-3.6.2 gtk3mm gmp gmpxx py3-sympy We will build using the default clang-4.0.0 compiler; building with the alternative g++-4.9.4 leads to trouble when linking against the libraries added with pkg_add. Configure and build with:: cd cadabra2 mkdir build cd build cmake -DENABLE_MATHEMATICA=OFF .. make su make install The command-line version is now available as ``cadabra2`` and the notebook interface as ``cadabra2-gtk``. Any feedback on this platform is welcome as this is not our development platform and testing is done only occasionally. FreeBSD ~~~~~~~ The recommended way to install Cadabra is through:: pkg install cadabra2 It is also possible to build and install Cadabra from the port:: cd /usr/ports/math/cadabra2 && make install clean The command-line version is now available as ``cadabra2`` and the notebook interface as ``cadabra2-gtk``. Any feedback on this platform is welcome as this is not our development platform. macOS ~~~~~ Cadabra builds with the standard Apple compiler, on both Intel and Apple silicon, but you do need a number of packages from Homebrew (see https://brew.sh). 
Install the required dependencies with:: brew install cmake boost gmp python3 brew install pkgconfig brew install gtkmm3 adwaita-icon-theme pip3 install sympy gmpy2 If the lines above prompt you to install XCode, go ahead and let it do that. You can build against an Anaconda Python installation (in case you prefer Anaconda over the Homebrew Python); cmake will automatically pick this up if available. You need to clone the cadabra2 git repository (if you download the .zip file you will not have all data necessary to build). So do:: git clone https://github.com/kpeeters/cadabra2 After that you can build with the standard:: cd cadabra2 mkdir build cd build cmake -DENABLE_MATHEMATICA=OFF .. make sudo make install (*note* the `-DENABLE_MATHEMATICA=OFF` in the `cmake` line above; the Mathematica scalar backend does not yet work on macOS). This will produce the command line app ``cadabra2`` and the Gtk notebook interface ``cadabra2-gtk``. Feedback from macOS users is *very* welcome because this is not the main development platform. Windows ~~~~~~~ On Windows compilation is easiest by using the MSYS2 system, as their gtkmm-3.0 packages just work and the whole system can be driven from the command line. We used to build Cadabra using the vcpkg packages, but they no longer provide packages for gtkmm-3.0, and in general the lack of binary packages means that build times are on the order of many, many hours, instead of just a few minutes with MSYS2. More info on building and packaging gtk apps on windows at https://www.gtk.org/docs/installations/windows/. Install MSYS2 from https://www.msys2.org and start a UCRT64 shell. 
First update with (if you don't do this you may end up not being able to install some of the required packages due to version conflicts):: pacman -Suy Then install a compiler and the dependencies of Cadabra with:: pacman -S mingw-w64-ucrt-x86_64-gcc pacman -S mingw-w64-ucrt-x86_64-gtkmm3 pacman -S mingw-w64-ucrt-x86_64-boost pacman -S mingw-w64-ucrt-x86_64-sqlite3 pacman -S mingw-w64-ucrt-x86_64-cmake pacman -S mingw-w64-ucrt-x86_64-python pacman -S mingw-w64-ucrt-x86_64-python-matplotlib pacman -S mingw-w64-ucrt-x86_64-python-sympy pacman -S mingw-w64-ucrt-x86_64-osslsigncode pacman -S git Checkout Cadabra and build:: git clone https://github.com/kpeeters/cadabra2 cd cadabra2 mkdir build cd build cmake .. ninja ninja install This will leave an installation in `Program Files (x86)/Cadabra`, from where you can start `cadabra2-gtk`. To build an installer, simply run `cpack` after having built Cadabra. Building a Jupyter kernel ------------------------- As of version 2.3.4 the standard build process (as described above) also creates a Jupyter kernel, which is written in Python on top of `ipykernel` (thanks to Fergus Baker). This should work on most platforms out-of-the-box; you do not need to do anything else. The Jupyter kernel allows you to use Cadabra notation inside a Jupyter notebook session. The distribution also still contains code for the 'old' Jupyter kernel, which is written in C++ on top of `xeus`. Building this kernel is more complicated mainly because of this dependency, and there is not much of an advantage over the Python kernel; it's mainly left in the tree for future reference. For full instructions on how to build the old `xeus`-based kernel, see https://github.com/kpeeters/cadabra2/blob/master/JUPYTER.rst. Creating an AppImage -------------------- The Cadabra build system can create an AppImage for use on a wide variety of Linux distributions (this is used to create the AppImage which is available from the `releases` on github). 
This build process has been tested by using Ubuntu 20.04 as base system. Install the prerequisites as above:: sudo apt install git cmake libpython3-dev python3-dev g++ libgmp3-dev \ libgtkmm-3.0-dev libboost-all-dev libssl-dev libgmp-dev libsqlite3-dev uuid-dev \ python3-matplotlib python3-mpmath python3-sympy python3-gmpy2 Now configure and build with:: cmake -DAPPIMAGE_MODE=ON -DCMAKE_INSTALL_PREFIX=/usr .. make make install DESTDIR=AppDir This installs everything in the `AppDir` folder ready for packaging. Then run:: make appimage to create the AppImage itself. If you run into trouble with this, please first consult the comments in the top-level `CMakeLists.txt` file about `linuxdeploy` and friends. Tutorials and other help ------------------------ Please consult https://cadabra.science/ for tutorial-style notebooks and all other documentation, and https://cadabra.science/doxygen/html/ for doxygen documentation of the current master branch. The latter can also be generated locally; you will need (on Debian and derivatives):: sudo apt-get install doxygen libjs-mathjax For any questions, please contact info@cadabra.science . Building Cadabra as C++ library ------------------------------- If you want to use the functionality of Cadabra inside your own C++ programs, you can build Cadabra as a shared library. To do this:: mkdir build-lib cd build-lib cmake -DBUILD_AS_CPP_LIBRARY=ON .. make sudo make install There is a sample program `simple.cc <https://github.com/kpeeters/cadabra2/blob/master/c++lib/simple.cc>`_ in the `c++lib` directory which shows how to use the Cadabra library. Notes on Python paths --------------------- Cadabra tries to play nice with a large variety of Python installations, which is not an easy task. In general, it will try to install in such a way that the Python interpreter which is specified at build time will be able to import the `cadabra2` Python module without any change to its path. This is necessary so that e.g. a Jupyter notebook will be able to find this module. 
Cadabra will therefore install its Python module in `site.getsitepackages()[0]`. Since this module constructs its docstrings dynamically on load, the manual pages are also stored relative to this module. However, Cadabra will install its binaries according to standard CMake logic in `$CMAKE_INSTALL_PREFIX/bin/`. On systems that have Python installed in subtree which is not below `$CMAKE_INSTALL_PREFIX`, this means that the Cadabra binaries and the Cadabra Python module will not be in the same subtree. This typically happens on systems with Python coming from Homebrew, as these will have Python somewhere below `/opt/homebrew` even when `$CMAKE_INSTALL_PREFIX` is `/usr/local/`. On some systems, users or package managers prefer that `site.getsitepackages()[0]` remains under control of the package manager (Homebrew is the typical example). In this case, if you do not want to write there, your only option is to first create a virtual environment before you run Cadabra's `cmake`. Special thanks -------------- Special thanks to José M. Martín-García (for the xPerm canonicalisation code), James Allen (for writing much of the factoring code), Dominic Price (for the meld algorithm implementation, many additions to the notebook interface, the conversion to pybind and the Windows port), Fergus Baker (for the new Jupyter kernel), Isuru Fernando (for the Conda packaging), the Software Sustainability Institute and the Institute of Advanced Study. Thanks to the many people who have sent me bug reports (keep 'm coming), and thanks to all of you who use Cadabra, sent feedback or cited the Cadabra papers. Licenses -------- Cadabra itself is licensed under the GPL-3.0. 
It includes some dependencies which have the following licenses: * tiny-process-lib [https://gitlab.com/eidheim/tiny-process-library/] MIT license ================================================ FILE: c++lib/.gitignore ================================================ Makefile treetracker ================================================ FILE: c++lib/CMakeLists.txt ================================================ cmake_minimum_required(VERSION 3.12) set(CMAKE_CXX_STANDARD 17) project(Cadabra) #--------------------------------------------------------------------------- # Preamble. #--------------------------------------------------------------------------- # Set path to additional cmake files set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../cmake/modules") # Disable warning 'MACOSX_RPATH is enabled by default' if (POLICY CMP0042) cmake_policy(SET CMP0042 NEW) endif(POLICY CMP0042) # Disable warning 'Only interpret if() arguments as variables or keywords when unquoted' if (POLICY CMP0054) cmake_policy(SET CMP0054 NEW) endif() # Ensure that we can build the library and install it without having to # build the samples. set(CMAKE_SKIP_INSTALL_ALL_DEPENDENCY TRUE) find_package(Python REQUIRED COMPONENTS Interpreter Development) message(STATUS "Found python library: ${Python_LIBRARIES}") message(STATUS "Found python headers: ${Python_INCLUDE_DIRS}") message(STATUS "Python version is ${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}.") #--------------------------------------------------------------------------- # User options. 
#--------------------------------------------------------------------------- set(INSTALL_LIB_DIR lib CACHE PATH "Installation directory for library") set(INSTALL_INCLUDE_DIR include/cadabra2++ CACHE PATH "Installation directory for header files") # Scalar backend options option(USE_TREETRACKER "Use the TreeTracker scalar backend" OFF) set(PATH_TREETRACKER "./treetracker" CACHE STRING "Path to the TreeTracker library") #--------------------------------------------------------------------------- # Compiler flags. #--------------------------------------------------------------------------- add_definitions("-DNO_SYMPY") if(CMAKE_COMPILER_IS_GNUCXX) add_definitions("-Wall -g -Wno-unused-but-set-variable") endif() if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX) if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 14.0) message(STATUS "This version of g++ (${CMAKE_CXX_COMPILER_VERSION}) incorrectly warns about possibly uninitialised memory when using std::variant containing a std::shared_ptr. Disabling this warning.") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-maybe-uninitialized") endif() set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2") endif() if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2") endif() if(MSVC) set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) set(MSVC_FLAGS "/wd4101" # unreferenced local variable "/wd4250" # inherits via dominance "/wd4244" # conversion from x to y, possible loss of data "/wd4267" # same as 4244 "/wd4996" # deprecated POSIX functions "-D_CRT_SECURE_NO_WARNINGS" # don't warn about deprecated functions ) foreach(FLAG ${MSVC_FLAGS}) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}") endforeach() endif() #--------------------------------------------------------------------------- # Find libraries. #--------------------------------------------------------------------------- # Locate gmpxx. On Homebrew there seems to be constantly something wrong with # the pkgconfig for gmpxx. 
So we just add the include path by hand. if(APPLE) add_definitions("-I/usr/local/include -I/opt/local/include") endif() if(MSVC) find_package(GMPXX REQUIRED) set(GMP_LIB "${GMPXX_LIBRARIES}") set(GMPXX_LIB "${GMPXX_LIBRARIES}") include_directories("${VCPKG_INCLUDE_DIRS}") else() find_library(GMP_LIB gmp REQUIRED) find_library(GMPXX_LIB gmpxx REQUIRED) message(STATUS "Found gmp ${GMP_LIB}") message(STATUS "Found gmpxx ${GMPXX_LIB}") endif() #--------------------------------------------------------------------------- # Enumerate source files. #--------------------------------------------------------------------------- SET(RESERVED_NODE_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR}/../core/ReservedNode.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Equals.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Sum.cc ) set(LOCAL_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR}/../core/Adjform.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Algorithm.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Cleanup.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Combinatorics.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Compare.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/DisplayBase.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/DisplayTeX.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/DisplaySympy.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/DisplayTerminal.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Grouping.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/TerminalStream.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Multiplier.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/NDSolver.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/NEvaluator.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/NTensor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/NInterpolatingFunction.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Exceptions.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Exchange.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/ExManip.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Functional.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/IndexIterator.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/IndexClassifier.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Hash.cc 
${CMAKE_CURRENT_SOURCE_DIR}/../core/Kernel.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Parser.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/PreClean.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/PreProcessor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/ProgressMonitor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Props.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Stopwatch.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Storage.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/Symbols.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/YoungTab.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Accent.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/AntiCommuting.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/AntiSymmetric.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Commuting.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/CommutingAsProduct.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/CommutingAsSum.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/CommutingBehaviour.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Coordinate.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/DAntiSymmetric.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Depends.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/DependsInherit.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Derivative.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/DerivativeOp.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Determinant.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Diagonal.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/DifferentialForm.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/DiracBar.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Distributable.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/EpsilonTensor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/ExteriorDerivative.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/FilledTableau.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/GammaMatrix.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/GammaTraceless.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/ImaginaryI.cc 
${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/ImplicitIndex.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Indices.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Integer.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/InverseMetric.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/KroneckerDelta.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/LaTeXForm.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Matrix.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Metric.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/NonCommuting.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/NumericalFlat.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/PartialDerivative.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/RiemannTensor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/SatisfiesBianchi.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/SelfAntiCommuting.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/SelfCommuting.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/SelfNonCommuting.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/SortOrder.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Spinor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Symbol.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Symmetric.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Tableau.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/TableauBase.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/TableauInherit.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/TableauSymmetry.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Trace.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Traceless.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/Weight.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/WeightInherit.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/WeylTensor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/modules/xperm_new.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/canonicalise.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/collect_components.cc 
${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/collect_factors.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/collect_terms.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/combine.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/complete.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/decompose_product.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/distribute.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/drop_weight.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/einsteinify.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/eliminate_kronecker.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/eliminate_metric.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/epsilon_to_delta.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/evaluate.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/expand.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/expand_delta.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/expand_diracbar.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/expand_power.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/factor_in.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/factor_out.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/fierz.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/first_order_form.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/flatten_product.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/flatten_sum.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/indexsort.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/integrate_by_parts.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/join_gamma.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/keep_terms.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/lr_tensor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/order.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/product_rule.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/reduce_delta.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/rename_dummies.cc 
${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/rewrite_indices.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/simplify.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/sort_product.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/sort_sum.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/split_gamma.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/split_index.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/substitute.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/sym.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/tab_basics.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/take_match.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/replace_match.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/unwrap.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/vary.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/young_project.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/young_project_product.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/young_project_tensor.cc ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/meld.cc SympyDummy.cc ) set(MAIN_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/../core/Adjform.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Algorithm.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Combinatorics.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Compare.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/DisplayBase.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/DisplayTerminal.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/NEvaluator.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Equals.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Exceptions.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/ExManip.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/IndexClassifier.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/IndexIterator.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Kernel.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/lru_cache.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Hash.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Multiplier.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/NTensor.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Parser.hh 
${CMAKE_CURRENT_SOURCE_DIR}/../core/ProgressMonitor.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Props.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/ReservedNode.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Storage.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Stopwatch.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/Sum.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/TerminalStream.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/tree.hh ${CMAKE_CURRENT_SOURCE_DIR}/../core/YoungTab.hh ) FILE(GLOB ALGO_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/../core/algorithms/*.hh) FILE(GLOB PROP_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/../core/properties/*.hh) # Copy relevant header files to the binary directory file(COPY ${MAIN_HEADERS} DESTINATION ${CMAKE_BINARY_DIR}/include/cadabra2++) file(COPY ${ALGO_HEADERS} DESTINATION ${CMAKE_BINARY_DIR}/include/cadabra2++/algorithms) file(COPY ${PROP_HEADERS} DESTINATION ${CMAKE_BINARY_DIR}/include/cadabra2++/properties) # Create a 'master' header which includes all the above for convenience file(WRITE "${CMAKE_BINARY_DIR}/include/cadabra2++.hh" "// Main headers\n") foreach(HEADER ${MAIN_HEADERS}) get_filename_component(FILENAME "${HEADER}" NAME) file(APPEND "${CMAKE_BINARY_DIR}/include/cadabra2++.hh" "#include \"cadabra2++/${FILENAME}\"\n") endforeach() file(APPEND "${CMAKE_BINARY_DIR}/include/cadabra2++.hh" "\n// Properties\n") foreach(HEADER ${PROP_HEADERS}) get_filename_component(FILENAME "${HEADER}" NAME) file(APPEND "${CMAKE_BINARY_DIR}/include/cadabra2++.hh" "#include \"cadabra2++/properties/${FILENAME}\"\n") endforeach() file(APPEND "${CMAKE_BINARY_DIR}/include/cadabra2++.hh" "\n// Algorithms\n") foreach(HEADER ${ALGO_HEADERS}) get_filename_component(FILENAME "${HEADER}" NAME) file(APPEND "${CMAKE_BINARY_DIR}/include/cadabra2++.hh" "#include \"cadabra2++/algorithms/${FILENAME}\"\n") endforeach() file(READ ${CMAKE_CURRENT_SOURCE_DIR}/cpplib.hh.in CPPLIB_HH_IN) file(APPEND "${CMAKE_BINARY_DIR}/include/cadabra2++.hh" "\n//Helper functions\n${CPPLIB_HH_IN}") set(LIB_INCLUDE_DIRS "." 
"${CADABRA_CORE_DIR}" "${CADABRA_LIBS_DIR}/pybind11/include" "${CADABRA_LIBS_DIR}/internal/include" "${CADABRA_LIBS_DIR}/dbg" ${Python_INCLUDE_DIRS} ) message("${LIB_INCLUDE_DIRS}") if(USE_TREETRACKER) add_definitions(-DUSE_TREETRACKER) if(EXISTS "${PATH_TREETRACKER}/treetracker.h" OR EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${PATH_TREETRACKER}/treetracker.h") include_directories(${PATH_TREETRACKER}) message("-- Using the TreeTracker scalar backend at ${PATH_TREETRACKER}") else() message(FATAL_ERROR "-- Cannot find the 'treetracker.h' file in '${PATH_TREETRACKER}'") endif() else() message("-- Not using the TreeTracker scalar backend") endif() #--------------------------------------------------------------------------- # Targets #--------------------------------------------------------------------------- add_library(cadabra2++objects OBJECT ${LOCAL_SRC_FILES} ${RESERVED_NODE_SRC_FILES}) set_property(TARGET cadabra2++objects PROPERTY POSITION_INDEPENDENT_CODE 1) target_include_directories(cadabra2++objects PUBLIC "${LIB_INCLUDE_DIRS}") add_library(cadabra2++ SHARED $) set_target_properties(cadabra2++ PROPERTIES PUBLIC_HEADER "${PUBLIC_HEADER_FILES}") target_link_libraries(cadabra2++ ${GMPXX_LIB} ${GMP_LIB} ${Python_LIBRARIES}) add_library(cadabra2++_static STATIC $) target_link_libraries(cadabra2++_static ${GMPXX_LIB} ${GMP_LIB} ${Python_LIBRARIES}) if(TBB_FOUND) target_link_libraries(cadabra2++ TBB::tbb) target_compile_definitions(cadabra2++objects PRIVATE HAS_TBB) endif() # Trivial example add_executable(trivial trivial.cc) target_include_directories(trivial PUBLIC "${CADABRA_LIBS_DIR}/pybind11/include" "${CMAKE_BINARY_DIR}/include" "${CMAKE_BINARY_DIR}/include/cadabra2++" ${Python_INCLUDE_DIRS} ) target_link_libraries(trivial cadabra2++) # Sample executable add_executable(simple simple.cc) target_include_directories(simple PUBLIC "${CADABRA_LIBS_DIR}/pybind11/include" "${CMAKE_BINARY_DIR}/include" "${CMAKE_BINARY_DIR}/include/cadabra2++" ${Python_INCLUDE_DIRS} ) 
target_link_libraries(simple cadabra2++) # Test for adjform add_executable(adjform adjform.cc) target_include_directories(adjform PUBLIC "${CADABRA_LIBS_DIR}/pybind11/include" "${CMAKE_BINARY_DIR}/include" "${CMAKE_BINARY_DIR}/include/cadabra2++" ${Python_INCLUDE_DIRS} ) target_link_libraries(adjform cadabra2++) # Test for numerical evaluation add_executable(nevaluate nevaluate.cc) target_include_directories(nevaluate PUBLIC "${CADABRA_LIBS_DIR}/pybind11/include" "${CMAKE_BINARY_DIR}/include" "${CMAKE_BINARY_DIR}/include/cadabra2++" ${Python_INCLUDE_DIRS} ) target_link_libraries(nevaluate cadabra2++) #--------------------------------------------------------------------------- # Installation #--------------------------------------------------------------------------- install(TARGETS cadabra2++ LIBRARY DESTINATION "${INSTALL_LIB_DIR}") install(TARGETS cadabra2++_static LIBRARY DESTINATION "${INSTALL_LIB_DIR}" ARCHIVE DESTINATION "${INSTALL_LIB_DIR}") install(DIRECTORY ${CMAKE_BINARY_DIR}/include DESTINATION "${INSTALL_INCLUDE_DIR}") install(DIRECTORY DESTINATION bin DIRECTORY_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE ) install(DIRECTORY DESTINATION lib DIRECTORY_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE ) ================================================ FILE: c++lib/README.txt ================================================ This directory contains code to illustrate the use of Cadabra from within C++ programs. 
================================================ FILE: c++lib/SympyDummy.cc ================================================ #include "Parser.hh" #include "Cleanup.hh" #include "PreClean.hh" #include "SympyCdb.hh" #include "DisplaySympy.hh" #ifdef USE_TREETRACKER #include "treetracker.h" #endif #include cadabra::Ex::iterator sympy::apply(const cadabra::Kernel& kernel, cadabra::Ex& ex, cadabra::Ex::iterator& it, const std::vector& head, std::vector args, const std::string& method) { std::ostringstream str; if(head.size()>0) str << head[0] << "("; cadabra::DisplaySympy ds(kernel, ex); ds.output(str, it); if(head.size()>0) if(args.size()>0) str << ", " << args[0] << ")"; str << method; if(head.size()>0) str << ")"; // std::cerr << "Send: " << str.str() << std::endl; #ifdef USE_TREETRACKER auto res = TreeTracker::FromString(str.str()); res.RecursiveSimplify(); std::stringstream istr; res.ShowTree(istr, 0, false, true); // std::cerr << "Return: " << istr.str() << std::endl; auto ptr = std::make_shared(); cadabra::Parser parser(ptr); istr >> parser; pre_clean_dispatch_deep(kernel, *parser.tree); cleanup_dispatch_deep(kernel, *parser.tree); //parser.tree->print_recursive_treeform(std::cerr, parser.tree->begin()); ds.import(*parser.tree); cadabra::Ex::iterator first=parser.tree->begin(); it = ex.move_ontop(it, first); #endif return it; } void sympy::invert_matrix(const cadabra::Kernel& kernel, cadabra::Ex& ex, cadabra::Ex& rules, const cadabra::Ex& tocompute) { throw std::logic_error("Not implemented: sympy::invert_matrix"); } void sympy::determinant(const cadabra::Kernel&, cadabra::Ex& ex, cadabra::Ex& rules, const cadabra::Ex& tocompute) { throw std::logic_error("Not implemented: sympy::determinant"); } void sympy::trace(const cadabra::Kernel&, cadabra::Ex& ex, cadabra::Ex& rules, const cadabra::Ex& tocompute) { throw std::logic_error("Not implemented: sympy::trace"); } ================================================ FILE: c++lib/adjform.cc 
================================================ #include "cadabra2++.hh" #include using namespace cadabra; using namespace cadabra::cpplib; int main(int, char**) { Kernel k(true); pprint_enable_utf8(); //{\mu,\nu}::Indices(vector). //tr{#}::Trace. //u^{\mu}::SelfNonCommuting. //u^{\mu}::ImplicitIndex. //ex:=tr{A u^{\nu} u^{\mu} u^{\mu} u^{\nu} + B u^{\mu} u^{\mu} u^{\nu} u^{\nu}}: //meld(_); inject_property(k, "{A,B,C,D }"); inject_property(k, "tr{#}"); auto ex = R"(tr(A B C D + B C D A)"_ex(k); meld m(k, *ex); std::cout << pprint(k, ex) << '\n'; m.apply_pre_order(); std::cout << pprint(k, ex) << '\n'; // assert ex == $2 * Tr{ A B C D }$ } ================================================ FILE: c++lib/cpplib.hh.in ================================================ #ifdef _MSC_VER #define WIN32_LEAN_AND_MEAN #include #endif namespace cadabra { namespace cpplib { using ExPtr = std::shared_ptr; namespace detail { struct ExConstructorProxy { ExConstructorProxy(const char* data) : data(data) {} ExPtr operator () (Kernel& kernel) { return kernel.ex_from_string(data); } std::string data; }; struct ExPrettyPrinter { ExPrettyPrinter(const Kernel& kernel, Ex::iterator it, bool use_unicode = true) : kernel(kernel) , ex(it) , use_unicode(use_unicode) {} const Kernel& kernel; Ex ex; bool use_unicode; }; } ExPtr copy(const ExPtr& ex) { return std::make_shared(*ex); } template void inject_property(Kernel& kernel, const ExPtr& pattern, const ExPtr& args = nullptr) { kernel.inject_property(new Prop(), pattern, args); } template void inject_property(Kernel& kernel, const char* pattern, const ExPtr& args = nullptr) { inject_property(kernel, kernel.ex_from_string(pattern), args); } template void inject_property(Kernel& kernel, const ExPtr& pattern, const char* args) { inject_property(kernel, pattern, kernel.ex_from_string(args)); } template void inject_property(Kernel& kernel, const char* pattern, const char* args) { inject_property(kernel, kernel.ex_from_string(pattern), 
kernel.ex_from_string(args)); } template ExPtr apply(const Kernel& kernel, const ExPtr& ex, Args&&... args, bool deep = true, bool repeat = false, unsigned int depth = 0) { Algo algo(kernel, *ex, std::forward(args)...); algo.apply_generic(deep, repeat, depth); return ex; } template ExPtr apply(const Kernel& kernel, const ExPtr& ex, Ex::iterator it, Args&&... args, bool deep = true, bool repeat = false, unsigned int depth = 0) { Algo algo(kernel, *ex, std::forward(args)...); algo.apply_generic(it, deep, repeat, depth); return ex; } template ExPtr apply_preorder(const Kernel& kernel, const ExPtr& ex, Args&&... args) { Algo algo(kernel, *ex, std::forward(args)...); algo.apply_pre_order(); return ex; } inline void pprint_enable_utf8() { // Set the codepage on windows to utf8 #ifdef _MSC_VER SetConsoleOutputCP(65001); #endif } inline detail::ExConstructorProxy operator "" _ex(const char* ex, std::size_t n) { return detail::ExConstructorProxy(ex); } inline detail::ExPrettyPrinter pprint(const Kernel& kernel, const ExPtr& ex, bool use_unicode = true) { return detail::ExPrettyPrinter(kernel, ex->begin(), use_unicode); } inline detail::ExPrettyPrinter pprint(const Kernel& kernel, const Ex& ex, bool use_unicode = true) { return detail::ExPrettyPrinter(kernel, ex.begin(), use_unicode); } inline detail::ExPrettyPrinter pprint(const Kernel& kernel, Ex::iterator it, bool use_unicode = true) { return detail::ExPrettyPrinter(kernel, it, use_unicode); } inline std::ostream& operator << (std::ostream& stream, const detail::ExPrettyPrinter& pretty_printer) { DisplayTerminal dt(pretty_printer.kernel, pretty_printer.ex, pretty_printer.use_unicode); dt.output(stream); return stream; } } } ================================================ FILE: c++lib/nevaluate.cc ================================================ #include "cadabra2++.hh" #include using namespace cadabra; using namespace cadabra::cpplib; int main(int, char **) { Kernel k(true); pprint_enable_utf8(); 
//{\mu,\nu}::Indices(vector). // tr{#}::Trace. // u^{\mu}::SelfNonCommuting. // u^{\mu}::ImplicitIndex. // ex:=tr{A u^{\nu} u^{\mu} u^{\mu} u^{\nu} + B u^{\mu} u^{\mu} u^{\nu} // u^{\nu}}: meld(_); // inject_property(k, "{A,B,C,D }"); // inject_property(k, "tr{#}"); // Scalar broadcast NTensor t3a( { 2.0 } ); std::cerr << "t3a.shape.size() = " << t3a.shape.size() << std::endl; std::cerr << "t3a.shape[0] = " << t3a.shape[0] << std::endl; NTensor t432a=t3a.broadcast( {4, 1}, 1 ); std::cerr << t432a << std::endl; // NTensor broadcast NTensor t3( { 1.0, 2.0, 3.0 } ); NTensor t432=t3.broadcast( {4,3,2}, 1 ); std::cerr << t432 << std::endl; NTensor t3c( { 1.0, 2.0, 3.0 } ); NTensor t432c1=t3c.broadcast( {3,3,2}, 1 ); std::cerr << t432c1 << std::endl; NTensor t432c2=t3c.broadcast( {3,3,2}, 0 ); std::cerr << t432c2 << std::endl; // Multiplying two scalar variables which each take // an array of values leads to an outer product. auto ex1b = "B*A + A"_ex(k); NEvaluator ev1b(ex1b->begin()); ev1b.set_variable(Ex("A"), NTensor({1.0, 2.0, 3.0})); ev1b.set_variable(Ex("B"), NTensor({0.5, 1.0, 5.0, 10.0})); // This should give a {3, 4} tensor. auto res1b = ev1b.evaluate(); std::cout << "B*A + A = " << res1b << "\n\n"; auto ex1 = "B*A + C"_ex(k); NEvaluator ev1(ex1->begin()); ev1.set_variable(Ex("A"), NTensor({1.0, 2.0, 3.0})); ev1.set_variable(Ex("B"), NTensor({0.5, 1.0, 5.0, 10.0})); ev1.set_variable(Ex("C"), NTensor({1.0, -1.0})); // This should give a {3, 4, 2} tensor. auto res1 = ev1.evaluate(); std::cout << "B*A + C = " << res1 << "\n\n"; // Trigonometric functions. 
auto ex2 = R"( A + B \cos( C ) )"_ex(k); std::cout << pprint(k, ex2) << '\n'; NTensor nt2({2,4}, 0.0); nt2.at({1,2}) = 3.1415; for(auto& v: nt2.values) std::cout << v << ", "; std::cout << "\n\n"; std::cout << nt2 << std::endl; NEvaluator ev(ex2->begin()); ev.set_variable(Ex("C"), { 3.0 }); ev.set_variable(Ex("B"), { 2.3 }); ev.set_variable(Ex("A"), { 1.2 }); auto res2 = ev.evaluate(); std::cout << "A + B cos(C) = " << res2 << "\n\n"; // Double trig. Stopwatch sw; auto ex3 = R"( \cos(x) \sin(y) )"_ex(k); NEvaluator ev3(ex3->begin()); ev3.set_variable(Ex("x"), NTensor::linspace(0.0, 3.14, 1000)); ev3.set_variable(Ex("y"), NTensor::linspace(0.0, 3.14, 1000)); sw.start(); int num=100; for(int i=0; i #include /// \file simple.cc /// \ingroup libcadabra /// /// Sample program to demonstrate the use of Cadabra directly from C++ code. void test1() { // The following few lines are equivalent to entering // // {r,t}::Coordinate. // {m,n}::Indices(values={t,r}, position=free). // ex:= A_{m} A^{m}; // rl:= A_{t} = 3 + a; // evaluate(ex, rl); // // in the Cadabra notebook. cadabra::Kernel kernel; kernel.inject_property(new cadabra::Coordinate(), kernel.ex_from_string("{r,t}"), 0); kernel.inject_property(new cadabra::Indices(), kernel.ex_from_string("{m,n}"), kernel.ex_from_string("values={t,r}, position=free")); auto ex = kernel.ex_from_string("A_{m} A^{m}"); auto rl = kernel.ex_from_string("A_{t} = 3 + a "); cadabra::evaluate ev(kernel, *ex, *rl); ev.apply_generic(); // Pretty-printing stream object. cadabra::TerminalStream ss(kernel, std::cerr); ss << ex << std::endl; } void test2() { // The following few lines are equivalent to entering // // {m,n,p,q}::Indices(position=free). // \partial{#}::PartialDerivative; // ex:= \int{ F_{m n} F^{m n} }{x}; // rl:= F_{m n} = \\partial_{m}{A_{n}} - \\partial_{n}{A_{m}}; // substitute(ex, rl, deep=True); // // in the Cadabra notebook. 
cadabra::Kernel kernel; auto ind1 = kernel.ex_from_string("{m,n,p,q}"); auto ind2 = kernel.ex_from_string("position=free"); kernel.inject_property(new cadabra::Indices(), ind1, ind2); auto pd = kernel.ex_from_string("\\partial{#}"); kernel.inject_property(new cadabra::PartialDerivative(), pd, 0); auto ex = kernel.ex_from_string("\\int{ F_{m n} F^{m n} }{x}"); auto rl = kernel.ex_from_string("F_{m n} = \\partial_{m}{A_{n}} - \\partial_{n}{A_{m}}"); // Pretty-printing stream object. cadabra::TerminalStream ss(kernel, std::cerr); ss << ex << std::endl; ss << rl << std::endl; // Apply the 'substitute' algorithm. cadabra::substitute subs(kernel, *ex, *rl); subs.apply_generic(); ss << ex << std::endl; } int main(int argc, char **argv) { test1(); test2(); } ================================================ FILE: c++lib/trivial.cc ================================================ #include "cadabra2++.hh" #include using namespace cadabra; using namespace cadabra::cpplib; int main() { Kernel k(true); inject_property(k, "{A,B}"); auto ex = "A B - B A"_ex(k); sort_product sp(k, *ex); sp.apply_generic(); collect_terms ct(k, *ex); ct.apply_generic(); std::cout << pprint(k, ex) << std::endl; } ================================================ FILE: client_server/Actions.cc ================================================ #include "Actions.hh" #include "DataCell.hh" #include "DocumentThread.hh" #include "GUIBase.hh" #include #include #include using namespace cadabra; #define DEBUG(ln) // #define DEBUG(ln) ln ActionBase::ActionBase(DataCell::id_t id) : ref_id(id) { } bool ActionBase::undoable() const { return true; } void ActionBase::execute(DocumentThread& cl, GUIBase& ) { if(ref_id.id==0) { // A zero ID means the current cell. ref = cl.current_cell; return; } else { // Lookup the cell with the given ID. auto it=cl.doc.begin(); while(it!=cl.doc.end()) { if((*it).id().id==ref_id.id) { ref=it; return; } ++it; } } // Not found, throw exception. 
std::string class_name = boost::core::demangle(typeid(*this).name()); throw std::logic_error(class_name + ": cannot find cell with id "+std::to_string(ref_id.id)); } ActionAddCell::ActionAddCell(DataCell cell, DataCell::id_t ref_id, Position pos_, bool activate_) : ActionBase(ref_id) , newcell(cell) , pos(pos_) , activate(activate_) , is_replacement(false) , is_input_form(false) { } void ActionAddCell::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); // Insert this DataCell into the DTree document. We first need // to figure out whether we already have a cell with the DataCell's // cell_id; in this case we have to replace, not append/insert. auto it=cl.doc.begin(); while(it!=cl.doc.end()) { if((*it).id().id==newcell.id().id) { // FIXME: right now we only change textbuf. DEBUG( std::cerr << "found! " << it->id().id << ", " << static_cast(it->cell_type) << std::endl; ) it->textbuf=newcell.textbuf; gb.update_cell(cl.doc, it); is_replacement=true; return; } ++it; } // If we get here we have to append/insert. 
DEBUG( std::cerr << "ActionAddCell::execute: add cell with id " << newcell.id().id; ) switch(pos) { case Position::before: newref = cl.doc.insert(ref, newcell); DEBUG( std::cerr << " before "; ) break; case Position::after: newref = cl.doc.insert_after(ref, newcell); DEBUG( std::cerr << " after "; ) break; case Position::child: newref = cl.doc.append_child(ref, newcell); DEBUG( std::cerr << " as child of "; ) break; } DEBUG( std::cerr << " " << ref->id().id << std::endl; ) child_num=cl.doc.index(newref); DEBUG( std::cerr << "ActionAddCell::execute: added as child " << child_num << ": |" << newcell.textbuf << "|" << std::endl; ) gb.add_cell(cl.doc, newref, true); if(activate) gb.position_cursor(cl.doc, newref, -1); if(newcell.cell_type == DataCell::CellType::input_form) is_input_form=true; } void ActionAddCell::revert(DocumentThread& cl, GUIBase& gb) { // Remove the GUI cell from the notebook and then // remove the corresponding DataCell from the DTree. DEBUG( std::cerr << "ActionAddCell::revert: removing child " << child_num << std::endl; ) DTree::sibling_iterator ch; switch(pos) { case Position::before: // `ref` is a cell after our cell. ch = cl.doc.child(cl.doc.parent(ref), child_num); break; case Position::after: // `ref` is a cell before our cell. ch = cl.doc.child(cl.doc.parent(ref), child_num); break; case Position::child: // `ref` is a parent of our cell. 
ch = cl.doc.child(ref, child_num); break; } DEBUG( std::cerr << "ActionAddCell::revert: removing cell " << ch->textbuf << std::endl; ) gb.remove_cell(cl.doc, ch); // std::cerr << "ActionAddCell::revert: finally erase datacell" << std::endl; cl.doc.erase(ch); } bool ActionAddCell::undoable() const { return !(is_replacement || is_input_form); } ActionPositionCursor::ActionPositionCursor(DataCell::id_t ref_id, Position pos_) : ActionBase(ref_id), needed_new_cell_with_id(0), pos(pos_) { } void ActionPositionCursor::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); switch(pos) { case Position::in: // std::cerr << "in" << std::endl; newref = ref; break; case Position::next: { DTree::sibling_iterator sib=ref; bool found=false; while(cl.doc.is_valid(++sib)) { if(sib->cell_type==DataCell::CellType::python || sib->cell_type==DataCell::CellType::latex) { if(!sib->hidden) { newref=sib; found=true; break; } } } if(!found) { if(ref->textbuf=="") { // If the last cell is empty, stay where we are. newref=ref; } else { // Make sure that we store the generated cell id so we // can re-use it if we execute this in redo. if(needed_new_cell_with_id > 0) { DataCell::id_t id; id.id = needed_new_cell_with_id; DataCell newcell(id, DataCell::CellType::python, ""); newref = cl.doc.insert(sib, newcell); } else { DataCell newcell(DataCell::CellType::python, ""); needed_new_cell_with_id=newcell.id().id; newref = cl.doc.insert(sib, newcell); } } } break; } case Position::previous: { bool found=false; DTree::sibling_iterator sib=ref; while(cl.doc.is_valid(--sib)) { if(sib->cell_type==DataCell::CellType::python || sib->cell_type==DataCell::CellType::latex) { if(!sib->hidden) { newref=sib; found=true; break; } } } if(!found) newref=ref; // No previous sibling cell. FIXME: walk tree structure break; } } // Update GUI. 
if(needed_new_cell_with_id > 0) { // std::cerr << "cadabra-client: adding new visual cell before positioning cursor" << std::endl; gb.add_cell(cl.doc, newref, true); } // std::cerr << "cadabra-client: positioning cursor" << std::endl; gb.position_cursor(cl.doc, newref, -1); DEBUG( std::cerr << "ActionPositionCursor::execute: done" << std::endl; ) } void ActionPositionCursor::revert(DocumentThread& cl, GUIBase& gb) { if(needed_new_cell_with_id > 0) { gb.remove_cell(cl.doc, newref); cl.doc.erase(newref); } gb.position_cursor(cl.doc, ref, -1); DEBUG( std::cerr << "ActionPositionCursor::revert: done" << std::endl; ) } ActionRemoveCell::ActionRemoveCell(DataCell::id_t ref_id) : ActionBase(ref_id) { } ActionRemoveCell::~ActionRemoveCell() { } void ActionRemoveCell::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); gb.remove_cell(cl.doc, ref); reference_parent_cell = cl.doc.parent(ref); reference_child_index = cl.doc.index(ref); removed_tree=DTree(ref); DEBUG( std::cerr << "removed has " << cl.doc.number_of_children(ref) << " children" << std::endl; ) cl.doc.erase(ref); } void ActionRemoveCell::revert(DocumentThread& cl, GUIBase& gb) { DEBUG( std::cerr << "need to undo a remove cell at index " << reference_child_index << std::endl; ) DTree::iterator newcell; if(cl.doc.number_of_children(reference_parent_cell)==0) { newcell = cl.doc.append_child(reference_parent_cell, removed_tree.begin()); } else { auto it = cl.doc.child(reference_parent_cell, reference_child_index); // ++it; newcell = cl.doc.insert_subtree(it, removed_tree.begin()); DEBUG( std::cerr << "added doc cell " << newcell->textbuf << " at " << &(*newcell) << " before " << it->textbuf << std::endl; ) } gb.add_cell(cl.doc, newcell, true); DEBUG( std::cerr << "added vis rep" << std::endl; ) } ActionReplaceCell::ActionReplaceCell(DataCell::id_t ref_id) : ActionBase(ref_id) { } ActionReplaceCell::~ActionReplaceCell() { } void ActionReplaceCell::execute(DocumentThread& cl, GUIBase& gb) { } void 
ActionReplaceCell::revert(DocumentThread& cl, GUIBase& gb) { } bool ActionReplaceCell::undoable() const { return false; } ActionSplitCell::ActionSplitCell(DataCell::id_t ref_id) : ActionBase(ref_id) { } ActionSplitCell::~ActionSplitCell() { } void ActionSplitCell::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); size_t pos = gb.get_cursor_position(cl.doc, ref); std::string segment1=ref->textbuf.substr(0, pos); std::string segment2=ref->textbuf.substr(pos); // Strip leading newline in 2nd segment, if any. if(segment2.size()>0) { if(segment2[0]=='\n') segment2=segment2.substr(1); } DataCell newcell(ref->cell_type, segment2); newref = cl.doc.insert_after(ref, newcell); ref->textbuf=segment1; gb.add_cell(cl.doc, newref, true); gb.update_cell(cl.doc, ref); } void ActionSplitCell::revert(DocumentThread&, GUIBase& ) { // FIXME: implement } ActionSetRunStatus::ActionSetRunStatus(DataCell::id_t ref_id, bool running) : ActionBase(ref_id), new_running_(running) { } bool ActionSetRunStatus::undoable() const { return false; } void ActionSetRunStatus::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); gb.update_cell(cl.doc, ref); was_running_=ref->running; ref->running=new_running_; } void ActionSetRunStatus::revert(DocumentThread&, GUIBase& ) { } ActionRunCell::ActionRunCell(DataCell::id_t ref_id) : ActionBase(ref_id), run_all_cells(false) { } ActionRunCell::ActionRunCell() : ActionBase(DataCell::id_t()), run_all_cells(true) { } ActionRunCell::~ActionRunCell() { } bool ActionRunCell::undoable() const { return false; } void ActionRunCell::execute(DocumentThread& cl, GUIBase& gb) { if(!run_all_cells) { ActionBase::execute(cl, gb); cl.run_cell(ref, false); } else { cl.run_all_cells(); } } void ActionRunCell::revert(DocumentThread&, GUIBase& ) { } ActionOpen::ActionOpen(const std::string& n) : ActionBase(DataCell::id_t()), notebook_name(n) { } ActionOpen::~ActionOpen() { } bool ActionOpen::undoable() const { return false; } void 
ActionOpen::execute(DocumentThread& cl, GUIBase& gb) { std::ifstream file(notebook_name); std::string content, line; while(std::getline(file, line)) content+=line; cl.load_from_string(content); } void ActionOpen::revert(DocumentThread&, GUIBase& ) { } ActionSetVariableList::ActionSetVariableList(DataCell::id_t ref_id, std::set variables) : ActionBase(ref_id), new_variables_(variables) { } bool ActionSetVariableList::undoable() const { return false; } void ActionSetVariableList::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); ref->variables_referenced=new_variables_; } void ActionSetVariableList::revert(DocumentThread&, GUIBase& ) { } ActionInsertText::ActionInsertText(DataCell::id_t ref_id, int pos, const std::string& content) : ActionBase(ref_id), insert_pos(pos), text(content) { } void ActionInsertText::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); ref->textbuf.insert(insert_pos, text); DEBUG( std::cerr << "ActionInsertText::execute: textbuf now |" << ref->textbuf << "|" << std::endl; ) gb.update_cell(cl.doc, ref); } void ActionInsertText::revert(DocumentThread& cl, GUIBase& gb) { ref->textbuf.erase(insert_pos, text.size()); DEBUG( std::cerr << "ActionInsertText::revert: textbuf now |" << ref->textbuf << "|" << std::endl; ) gb.update_cell(cl.doc, ref); } ActionCompleteText::ActionCompleteText(DataCell::id_t ref_id, int pos, const std::string& content, int alt) : ActionBase(ref_id), insert_pos(pos), text(content), alternative_(alt) { } void ActionCompleteText::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); auto endpos = ref->textbuf.insert(insert_pos, text); // std::cerr << "complete: textbuf now |" << ref->textbuf << "|" << std::endl; gb.update_cell(cl.doc, ref); gb.position_cursor(cl.doc, ref, insert_pos+text.size()); } void ActionCompleteText::revert(DocumentThread& cl, GUIBase& gb) { ref->textbuf.erase(insert_pos, text.size()); gb.update_cell(cl.doc, ref); gb.position_cursor(cl.doc, 
ref, insert_pos); } int ActionCompleteText::length() const { return text.size(); } int ActionCompleteText::alternative() const { return alternative_; } ActionEraseText::ActionEraseText(DataCell::id_t ref_id, int start, int end) : ActionBase(ref_id), from_pos(start), to_pos(end) { } void ActionEraseText::execute(DocumentThread& cl, GUIBase& gb) { ActionBase::execute(cl, gb); DEBUG( std::cerr << from_pos << ", " << to_pos << std::endl; ) removed_text=ref->textbuf.substr(from_pos, to_pos-from_pos); ref->textbuf.erase(from_pos, to_pos-from_pos); } void ActionEraseText::revert(DocumentThread& cl, GUIBase& gb) { ref->textbuf.insert(from_pos, removed_text); gb.update_cell(cl.doc, ref); gb.position_cursor(cl.doc, ref, from_pos+removed_text.size()); } ================================================ FILE: client_server/Actions.hh ================================================ #pragma once #include "DataCell.hh" #include "DocumentThread.hh" #include namespace cadabra { class DocumentThread; class GUIBase; /// \ingroup clientserver /// /// All actions derive from the ActionBase object, which defines /// the interface they need to implement. These objects are used to /// pass (user) action instructions around. They can be stored in /// undo/redo stacks. All actions run on the GUI thread. The /// update_gui members typically call members of the GUIBase class. /// Action objects are allowed to modify the DTree document doc, /// since they essentially contain code which is part of the /// DocumentThread object. /// /// All modifications to the document are done by calling 'perform' with an /// action object. This enables us to implement an undo stack. This method /// will take care of making the actual change to the DTree document, and /// call back on the 'change' methods above to inform the derived class /// that a change has been made. class ActionBase { public: ActionBase(DataCell::id_t ref_id); /// Perform the action. 
This should update both the document /// tree data structure and the GUI. The latter is updated /// by calling relevant methods on the GUIBase object passed /// in. /// /// The base class just looks up the cell given its `id_t`. /// If your action does not refer to a cell at all, you do /// not need to call the base class `execute`. virtual void execute(DocumentThread&, GUIBase&); /// Revert the change to the DTree document and the GUI. virtual void revert(DocumentThread&, GUIBase&)=0; /// Can this action be undone? virtual bool undoable() const; DataCell::id_t ref_id; /// If you want a callback once this action has finished, /// set it here before queuing. std::function callback; protected: DTree::iterator ref; }; /// \ingroup clientserver /// /// Add a cell to the notebook. class ActionAddCell : public ActionBase { public: enum class Position { before, after, child }; ActionAddCell(DataCell, DataCell::id_t ref_, Position pos_, bool activate=false); virtual ~ActionAddCell() {}; virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; /// Can this action be undone? virtual bool undoable() const override; private: // Keep track of the location where this cell is inserted into // the notebook. DataCell newcell; DTree::iterator newref; Position pos; int child_num; bool activate; // If we are replacing a cell, keep track of that so we // report that we are not undoable. bool is_replacement; // For input-form cells, we want no undo, as they will go // when the owner cell will be reverted. bool is_input_form; }; /// \ingroup clientserver /// /// Position the cursor relative to the indicated cell. If position is 'next' and /// there is no input cell following the indicated one, create a new one. 
class ActionPositionCursor : public ActionBase { public: enum class Position { in, next, previous }; ActionPositionCursor(DataCell::id_t ref_id_, Position pos_); virtual ~ActionPositionCursor() {}; virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; private: uint64_t needed_new_cell_with_id; DTree::iterator newref; Position pos; }; /// \ingroup clientserver /// /// Update the running status of the indicated cell. class ActionSetRunStatus : public ActionBase { public: ActionSetRunStatus(DataCell::id_t ref_id_, bool running); virtual ~ActionSetRunStatus() {}; virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; virtual bool undoable() const override; private: DTree::iterator this_cell; bool was_running_, new_running_; }; /// \ingroup clientserver /// /// Update the list of referenced variables in this cell. class ActionSetVariableList : public ActionBase { public: ActionSetVariableList(DataCell::id_t ref_id_, std::set); virtual ~ActionSetVariableList() {}; virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; virtual bool undoable() const override; private: DTree::iterator this_cell; std::set new_variables_; }; /// \ingroup clientserver /// /// Remove a cell and all its child cells from the document. class ActionRemoveCell : public ActionBase { public: ActionRemoveCell(DataCell::id_t ref_id_); virtual ~ActionRemoveCell(); virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; private: // Keep track of the location where this cell (and its child // cells) was in the notebook. We keep a reference to the // parent cell and the index of the current cell as child of // that parent. 
DTree removed_tree; DTree::iterator reference_parent_cell; size_t reference_child_index; }; /// \ingroup clientserver /// /// Replace the contents of a cell. Not undo-able. class ActionReplaceCell : public ActionBase { public: ActionReplaceCell(DataCell::id_t ref_id_); virtual ~ActionReplaceCell(); virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; virtual bool undoable() const override; private: }; /// \ingroup clientserver /// /// Split a cell into two separate cells, at the point of the cursor. class ActionSplitCell : public ActionBase { public: ActionSplitCell(DataCell::id_t ref_id); virtual ~ActionSplitCell(); virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; private: DTree::iterator newref; // the newly created cell }; /// \ingroup clientserver /// /// Run a cell or run all cells. class ActionRunCell : public ActionBase { public: // Run a particular cell. ActionRunCell(DataCell::id_t ref_id); // Run all cells. ActionRunCell(); virtual ~ActionRunCell(); virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; virtual bool undoable() const override; private: bool run_all_cells; }; /// \ingroup clientserver /// /// Open a notebook from a file, in the current window. class ActionOpen : public ActionBase { public: ActionOpen(const std::string&); virtual ~ActionOpen(); virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; virtual bool undoable() const override; private: std::string notebook_name; }; /// \ingroup clientserver /// /// Add a text string (can be just a single character) at the point /// of the cursor. /// This action is assumed to be triggered from a user change to /// the GUI cells, so will not update the GUI itself, only the /// underlying DTree. 
However, the revert method will need to /// update the GUI representation. class ActionInsertText : public ActionBase { public: ActionInsertText(DataCell::id_t ref_id, int pos, const std::string&); virtual ~ActionInsertText() {}; virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; private: DTree::iterator this_cell; int insert_pos; std::string text; }; /// \ingroup clientserver /// /// Complete text at a point in a GUI cell with one or more /// alternative. /// In contrast to ActionInsertText, this one is triggered from /// the server-side, so will update the GUI both for execute /// and revert. class ActionCompleteText : public ActionBase { public: ActionCompleteText(DataCell::id_t ref_id, int pos, const std::string&, int alternative); virtual ~ActionCompleteText() {}; virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; int length() const; int alternative() const; private: DTree::iterator this_cell; int insert_pos; std::string text; int alternative_; // in case there is more than one completion alternative }; /// \ingroup clientserver /// /// Remove a text string starting at the indicated position, and /// with the indicated length, from the indicated cell. /// This action is assumed to be triggered from a user change to /// the GUI cells, so will not update the GUI itself, only the /// underlying DTree. However, the revert method will need to /// update the GUI representation. 
class ActionEraseText : public ActionBase { public: ActionEraseText(DataCell::id_t ref_id, int, int); virtual ~ActionEraseText() {}; virtual void execute(DocumentThread&, GUIBase&) override; virtual void revert(DocumentThread&, GUIBase&) override; private: DTree::iterator this_cell; int from_pos, to_pos; std::string removed_text; }; } // // class ActionMergeCells ================================================ FILE: client_server/CMakeLists.txt ================================================ # cmake_minimum_required(VERSION ${CADABRA_CMAKE_VERSION}) project(Cadabra) if(POLICY CMP0167) cmake_policy(SET CMP0167 NEW) endif() #--------------------------------------------------------------------------- # Preamble. #--------------------------------------------------------------------------- print_header("Configuring client-server") set(INSTALL_LATEX_DIR "share/cadabra2") #--------------------------------------------------------------------------- # Locate libraries. #--------------------------------------------------------------------------- if(USE_GTK4) find_package(GLIBMM4 REQUIRED) else() find_package(GLIBMM3 REQUIRED) endif() find_package(SQLITE3 REQUIRED) # Don't set pthreads to required. Either we're on a platform where explict # linking with -lpthread is the norm (e.g. Linux) and it'll be found, or we're # on a platform that include pthreads by default (e.g. BSD, macOS) where this # won't find anything, or we're on a road-much-less-traveled OS where the user # can figure out what's wrong without a hard error here. find_package(Threads) # We need at least Boost 1.71.0 because we now use `beast` (for the # websocket functionality) which was not stable before that version. # Actually, it is probably not stable before 1.75.0, but if we go that # high we cannot build on Ubuntu 20.04 anymore. 
find_package(Boost 1.71.0 COMPONENTS program_options date_time filesystem REQUIRED)

# OpenSSL needs to be linked in explicitly (probably because the boost
# material referring to it is all in headers?). If you use a cmake module,
# you will run into issues on MSYS2, so do *not* be tempted to do that
# unless you test on MSYS2 first.
set(OpenSSL_USE_STATIC_LIBS OFF)
find_package(OpenSSL REQUIRED)
message(STATUS "OPENSSL_LIBRARIES: ${OPENSSL_LIBRARIES}")
message(STATUS "OPENSSL_SSL_LIBRARY: ${OPENSSL_SSL_LIBRARY}")
message(STATUS "OPENSSL_CRYPTO_LIBRARY: ${OPENSSL_CRYPTO_LIBRARY}")
message(STATUS "OPENSSL_INCLUDE_DIR: ${OPENSSL_INCLUDE_DIR}")

#---------------------------------------------------------------------------
# Enumerate input files.
#---------------------------------------------------------------------------

# Sources for the cadabra-server executable: websocket server plus the
# core pieces it needs (DataCell serialisation, python glue, utilities).
set(CADABRA_SERVER_SRC
   cadabra-server.cc
   Server.cc
   Snoop.cc
   websocket_client.cc
   websocket_server.cc
   ${CADABRA_CORE_DIR}/InstallPrefix.cc
   ${CADABRA_CORE_DIR}/DataCell.cc
   ${CADABRA_CORE_DIR}/Exceptions.cc
   ${CADABRA_CORE_DIR}/CdbPython.cc
   ${CADABRA_CORE_DIR}/Stopwatch.cc
   ${CADABRA_CORE_DIR}/pythoncdb/py_helpers.cc
   ${CADABRA_LIBS_DIR}/whereami/whereami.c
   ${CADABRA_LIBS_DIR}/base64/base64.cc
   )

# Sources for the static client library (used by the GUI front-ends and
# the cadabra2html/cadabra2latex converters).
set(CADABRA_CLIENT_SRC
   ScriptThread.cc
   ComputeThread.cc
   DocumentThread.cc
   Actions.cc
   Snoop.cc
   websocket_client.cc
   websocket_server.cc
   ${CADABRA_CORE_DIR}/DataCell.cc
   ${CADABRA_CORE_DIR}/Exceptions.cc
   ${CADABRA_CORE_DIR}/InstallPrefix.cc
   ${CADABRA_CORE_DIR}/Stopwatch.cc
   ${CADABRA_LIBS_DIR}/whereami/whereami.c
   )

# Sources for the Jupyter kernel executable (optional, see ENABLE_JUPYTER).
set(JUPYTER_KERNEL_SRC
   cadabra-jupyter-kernel.cc
   cadabra-jupyter-kernel.hh
   cadabra-jupyter-main.cc
   Server.cc
   ${CADABRA_CORE_DIR}/InstallPrefix.cc
   ${CADABRA_CORE_DIR}/DataCell.cc
   ${CADABRA_CORE_DIR}/Exceptions.cc
   ${CADABRA_CORE_DIR}/CdbPython.cc
   ${CADABRA_CORE_DIR}/Stopwatch.cc
   ${CADABRA_LIBS_DIR}/whereami/whereami.c
   ${CADABRA_LIBS_DIR}/base64/base64.cc
   )

# set(CONNECTION_FILE ${CMAKE_CURRENT_SOURCE_DIR}/connection.json)
#
# add_custom_command(TARGET echo_kernel POST_BUILD
#   COMMAND ${CMAKE_COMMAND} -E
#   copy "${CONNECTION_FILE}" "${CMAKE_CURRENT_BINARY_DIR}/")
#
# add_custom_command(TARGET echo_kernel POST_BUILD
#   COMMAND ${CMAKE_COMMAND} -E
#   copy "${CMAKE_BINARY_DIR}/${XEUS_RUNTIME_FILE}" "${CMAKE_CURRENT_BINARY_DIR}/")

#---------------------------------------------------------------------------
# Include directories and preprocessor directives.
#---------------------------------------------------------------------------

include_directories(
   "."
   "${CADABRA_CORE_DIR}"
   "${CADABRA_LIBS_DIR}/pybind11/include"
   #   "${CADABRA_LIBS_DIR}/websocketpp"
   "${CADABRA_LIBS_DIR}/internal/include"
   "${CADABRA_LIBS_DIR}/whereami"
   "${CADABRA_LIBS_DIR}/base64"
   "${CADABRA_LIBS_DIR}/nlohmann"
   ${OPENSSL_INCLUDE_DIR}
   ${Boost_INCLUDE_DIRS}
   ${Python_INCLUDE_DIRS}
   ${SQLITE3_INCLUDE_DIR}
   )

# add_definitions(
#    -D_WEBSOCKETPP_CPP11_STL_
#    -DBOOST_ASIO_HAS_STD_CHRONO
#    -DBOOST_BIND_GLOBAL_PLACEHOLDERS
#    )

#---------------------------------------------------------------------------
# Make targets.
#---------------------------------------------------------------------------

# add_executable(tst tst.cc)
# target_link_libraries(tst
#    ${Boost_LIBRARIES}
#    Threads::Threads
#    )

# Server executable
if(WIN32)
   message(STATUS "Using the win32 subsystem for cadabra-server")
   add_executable(cadabra-server WIN32 ${CADABRA_SERVER_SRC})
else()
   add_executable(cadabra-server ${CADABRA_SERVER_SRC})
endif()
# target_link_libraries(cadabra-server cadabra_server)
# NOTE(review): the bare `pthread` after Threads::Threads is a hard link
# that only resolves on platforms shipping libpthread; see the comment at
# find_package(Threads) above.
target_link_libraries(cadabra-server
   pybind11::embed
   ${Boost_LIBRARIES}
   ${SQLITE3_LIBRARIES}
   ${Python_LIBRARIES}
   ${GLIBMM_LIBRARIES}
   ${OPENSSL_LIBRARIES}
   Threads::Threads
   pthread
   )
if(WIN32)
   target_link_libraries(cadabra-server ws2_32 mswsock bcrypt)
endif()
if(UNIX)
   if(${CMAKE_SYSTEM_NAME} MATCHES ".*BSD|DragonFly")
      target_link_libraries(cadabra-server util)
   else()
      target_link_libraries(cadabra-server dl util)
   endif()
endif()

# Client library
add_library(cadabra_client STATIC ${CADABRA_CLIENT_SRC})
target_link_libraries(cadabra_client
   ${Boost_LIBRARIES}
   ${SQLITE3_LIBRARIES}
   ${Python_LIBRARIES}
   ${GLIBMM_LIBRARIES}
   Threads::Threads
   ${OPENSSL_LIBRARIES}
   )
if(WIN32)
   target_link_libraries(cadabra_client ws2_32 bcrypt)
endif()

# cadabra2html
add_executable(cadabra2html
   cadabra2html.cc
   ${CADABRA_LIBS_DIR}/base64/base64.cc
   )
target_link_libraries(cadabra2html cadabra_client)

# cadabra2latex
add_executable(cadabra2latex
   cadabra2latex.cc
   ${CADABRA_LIBS_DIR}/base64/base64.cc
   )
target_link_libraries(cadabra2latex cadabra_client)

# Make sure sqlite3.dll is copied into the build directory on Windows.
if (WIN32)
   add_custom_command(TARGET cadabra_client POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E copy_if_different
      ${SQLITE3_LIBRARIES}
      ${PROJECT_BINARY_DIR}/${CADABRA_BUILD_TYPE}/sqlite3.dll
      )
endif()

#---------------------------------------------------------------------------
# Installation.
#---------------------------------------------------------------------------

# The preamble 'notebook.tex' should be kept in sync with the one in
# ../frontend/common/texengine; this one is meant for printing so
# slightly different, but cadabra-specific macros should be copied.
if (NOT INSTALL_TARGETS_ONLY)
   install(FILES notebook.tex  DESTINATION ${INSTALL_LATEX_DIR})
   install(FILES notebook.html DESTINATION ${INSTALL_LATEX_DIR})
endif()

#set(INSTALL_LIB_DIR lib CACHE PATH "Installation directory for libraries")
install(TARGETS cadabra-server DESTINATION ${CDB_BIN_PATH})
install(TARGETS cadabra2html cadabra2latex DESTINATION ${CDB_BIN_PATH})
# We don't need to install the libcadabra_client library as it is statically
# linked with our binaries anyway.
# install(
#    TARGETS cadabra_client
#    RUNTIME DESTINATION bin COMPONENT runtime
#    LIBRARY ARCHIVE DESTINATION lib
#    LIBRARY DESTINATION lib
#    )
# install(TARGETS cadabra_server LIBRARY ARCHIVE DESTINATION lib LIBRARY DESTINATION lib)

if(ENABLE_JUPYTER)
   install(TARGETS cadabra-jupyter-kernel DESTINATION ${CDB_BIN_PATH})
   install(FILES kernel.json connection.json DESTINATION share/jupyter/kernels/cadabra)
endif()

if (MSVC AND NOT INSTALL_TARGETS_ONLY)
   # GTK helpers
   install(
      FILES
      "${_VCPKG_ROOT_DIR}/installed/x64-windows/tools/glib/gspawn-win64-helper.exe"
      "${_VCPKG_ROOT_DIR}/installed/x64-windows/tools/glib/gspawn-win64-helper-console.exe"
      DESTINATION ${CDB_BIN_PATH}
      )
   # Python (for some reason this is not copied in)
   get_filename_component(CDB_PYTHON_BASE ${Python_LIBRARIES} NAME_WE)
   get_filename_component(CDB_PYTHON_PATH ${Python_LIBRARIES} DIRECTORY)
   message("--- Python directory: ${CDB_PYTHON_PATH}")
   string(TOLOWER ${CDB_PYTHON_BASE} CDB_PYTHON_BASE)
   message("--- Python library: ${CDB_PYTHON_BASE}.dll")
   install(FILES
      "${CDB_PYTHON_PATH}/../${CDB_PYTHON_BASE}.dll"
      DESTINATION ${CDB_BIN_PATH}
      )
endif()

# manual pages
if(NOT MSVC)
   install(
      FILES ../man/man1/cadabra-server.1
      DESTINATION share/man/man1
      )
endif()
================================================ FILE: client_server/ComputeThread.cc ================================================ #include #include #include #include "ComputeThread.hh" #include "DocumentThread.hh" #include "GUIBase.hh" #include "Actions.hh" #include "InstallPrefix.hh" #include "popen2.hh" #include #include #include "internal/unistd.h" #include "CdbPython.hh" using namespace cadabra; ComputeThread::ComputeThread(int server_port, std::string token, std::string ip_address) : gui(0), docthread(0), connection_is_open(false), restarting_kernel(false), server_pid(0), server_stdout(0), server_stderr(0), forced_server_port(server_port), forced_server_token(token), forced_server_ip_address(ip_address) { // The ComputeThread constructor (but _not_ the run() member!) is // always run on the gui thread, so we can grab the gui thread id // here. gui_thread_id=std::this_thread::get_id(); } ComputeThread::~ComputeThread() { if(server_stdout!=0) { close(server_stdout); // close(server_stderr); Glib::spawn_close_pid(server_pid); server_pid=0; server_stdout=0; server_stderr=0; } } void ComputeThread::set_master(GUIBase *b, DocumentThread *d) { gui=b; docthread=d; } void ComputeThread::init() { // Setup the WebSockets client. } void ComputeThread::try_connect() { wsclient.set_connect_handler(std::bind(&ComputeThread::on_open, this)); wsclient.set_fail_handler(std::bind(&ComputeThread::on_fail, this, std::placeholders::_1)); wsclient.set_close_handler(std::bind(&ComputeThread::on_close, this)); wsclient.set_message_handler(std::bind(&ComputeThread::on_message, this, std::placeholders::_1)); std::ostringstream uristr; uristr << "ws://" << (forced_server_ip_address.empty() ? "127.0.0.1" : forced_server_ip_address) << ":" << port; wsclient.connect(uristr.str()); // std::cerr << "cadabra-client: connect done" << std::endl; } void ComputeThread::run() { // This does *not* run on the GUI thread. 
init(); try_spawn_server(); try_connect(); // Enter run loop, which will never terminate anymore. The on_fail and on_close // handlers will re-try to establish connections when they go bad. wsclient.run(); } void ComputeThread::terminate() { wsclient.stop(); // If we have started the server ourselves, stop it now so we do // not leave mess behind. // http://riccomini.name/posts/linux/2012-09-25-kill-subprocesses-linux-bash/ if(server_pid!=0) { std::cerr << "cadabra-client: killing server" << std::endl; if(server_stdout!=0) { close(server_stdout); // close(server_stderr); Glib::spawn_close_pid(server_pid); server_pid=0; server_stdout=0; server_stderr=0; } // kill(server_pid, SIGKILL); // if(server_stdout) // pclose2(server_stdout, server_pid); } } bool ComputeThread::kernel_is_connected() const { return connection_is_open; } void ComputeThread::all_cells_nonrunning() { for(auto it: running_cells) { std::shared_ptr rs_action = std::make_shared(it.first, false); docthread->queue_action(rs_action); } if(gui) { gui->process_data(); gui->on_kernel_runstatus(false); } running_cells.clear(); } void ComputeThread::on_fail(const boost::beast::error_code& ec) { if(!restarting_kernel) { std::cerr << "cadabra-client: connection to server on port " << port << " failed, " << ec.message() << std::endl; } connection_is_open=false; all_cells_nonrunning(); if(gui && server_pid!=0) { // When a kernel restart is in progress, server_pid will be zero // and this block never runs. close(server_stdout); // close(server_stderr); // std::cerr << "closing connetion to terminated server" << std::endl; Glib::spawn_close_pid(server_pid); // kill(server_pid, SIGKILL); server_pid=0; server_stdout=0; server_stderr=0; gui->on_network_error(); } try_spawn_server(); try_connect(); } using SlotSpawnChildSetup = sigc::slot; void ComputeThread::try_spawn_server() { // Startup the server. First generate a UUID, pass this to the // starting server, then use this UUID to get access to the server // port. 
// std::cerr << "cadabra-client: spawning server" << std::endl; if(forced_server_port!=0) { port=forced_server_port; authentication_token=forced_server_token; return; } std::vector argv, envp; #if defined(_WIN32) || defined(_WIN64) argv.push_back("cadabra-server.exe"); #else const char *appdir = getenv("APPDIR"); if(appdir) { // std::cerr << "This is an AppImage, APPDIR = " << appdir << std::endl; argv.push_back(std::string(appdir)+"/usr/bin/cadabra-server"); } else { // std::cerr << "Not an AppImage." << std::endl; argv.push_back("cadabra-server"); } #endif Glib::Pid pid; std::string wd(""); // See https://bugs.launchpad.net/inkscape/+bug/1662531 for things related to // the 'envp' argument in the call below. try { #ifdef _WIN32 Glib::SpawnFlags flags = Glib::SPAWN_DO_NOT_REAP_CHILD | Glib::SPAWN_SEARCH_PATH | Glib::SpawnFlags::SPAWN_STDERR_TO_DEV_NULL; #else #if GLIBMM_MAJOR_VERSION > 2 || (GLIBMM_MAJOR_VERSION == 2 && GLIBMM_MINOR_VERSION >= 68) Glib::SpawnFlags flags = Glib::SpawnFlags::DEFAULT | Glib::SpawnFlags::SEARCH_PATH, #else Glib::SpawnFlags flags = Glib::SPAWN_DO_NOT_REAP_CHILD | Glib::SPAWN_SEARCH_PATH; #endif #endif Glib::spawn_async_with_pipes(wd, argv, /* envp, WITH envp, Fedora 27 fails to start python properly */ flags, sigc::slot(), &pid, 0, &server_stdout, 0); // We need to see stderr on the console // &server_stderr); //#if GLIBMM_MAJOR_VERSION > 2 || (GLIBMM_MAJOR_VERSION == 2 && GLIBMM_MINOR_VERSION >= 68) // Glib::SpawnFlags::DEFAULT | Glib::SpawnFlags::SEARCH_PATH, // sigc::slot(), //#else // flags, // sigc::slot([](){ FreeConsole(); }), //#endif // &pid, // 0, // &server_stdout, // 0); // We need to see stderr on the console // // &server_stderr); char buffer[100]; FILE *f = fdopen(server_stdout, "r"); if(fscanf(f, "%99s", buffer)!=1) { throw std::logic_error("Failed to read port from server."); } port = atoi(buffer); if(fscanf(f, "%99s", buffer)!=1) { throw std::logic_error("Failed to read authentication token from server."); } 
authentication_token=std::string(buffer); // std::cerr << "auth token: " << authentication_token << std::endl; } catch(Glib::SpawnError& err) { std::cerr << "Failed to start server " << argv[0] << ": " << err.what() << std::endl; // FIXME: cannot just fall through, the server is not up! } } void ComputeThread::on_open() { connection_is_open=true; restarting_kernel=false; if(gui) { gui->on_connect(); gui->on_kernel_runstatus(false); } // // now it is safe to use the connection // std::string msg; // //// if(stopit) { //// msg = //// "{ \"header\": { \"uuid\": \"none\", \"msg_type\": \"execute_interrupt\" }," //// " \"content\": { \"code\": \"print(42)\n\"} " //// "}"; //// } //// else { // msg = // "{ \"header\": { \"uuid\": \"none\", \"msg_type\": \"execute_request\" }," // " \"content\": { \"code\": \"import time\nprint(42)\ntime.sleep(10)\n\"} " // "}"; //// } // //// c->send(hdl, "import time\nfor i in range(0,10):\n print('this is python talking '+str(i))\nex=Ex('A_{m n}')\nprint(str(ex))", websocketpp::frame::opcode::text); // c->send(hdl, msg, websocketpp::frame::opcode::text); } void ComputeThread::on_close() { // std::cerr << "cadabra-client: connection closed" << std::endl; connection_is_open=false; all_cells_nonrunning(); if(gui) { if(restarting_kernel) { gui->on_disconnect("restarting kernel"); gui->on_kernel_runstatus(true); } else { gui->on_disconnect("not connected"); } } sleep(1); // do not cause a torrent... try_connect(); } void ComputeThread::cell_finished_running(DataCell::id_t id) { if(id.id==0) { // This was code without a cell representation (run in // response to e.g. a slider update). Ignore. } else { auto it=running_cells.find(id); if(it==running_cells.end()) { throw std::logic_error("Cannot find cell with id = "+std::to_string(id.id)); } if(it->second==1) { // Mark this cell as no longer running. 
std::shared_ptr rs_action = std::make_shared(id, false); docthread->queue_action(rs_action); running_cells.erase(it); } else it->second -= 1; } } void ComputeThread::on_message(const std::string& msg) { // Parse the JSON message. nlohmann::json root; try { root=nlohmann::json::parse(msg); } catch(nlohmann::json::exception& e) { std::cerr << "cadabra-client: cannot parse message." << std::endl; return; } if(getenv("CADABRA_SHOW_RECEIVED")) { std::cerr << "RECV: " << root.dump(3) << std::endl; } if(root.count("header")==0) { std::cerr << "cadabra-client: received message without 'header'." << std::endl; return; } if(root.count("content")==0) { std::cerr << "cadabra-client: received message without 'content'." << std::endl; return; } const nlohmann::json& header = root["header"]; const nlohmann::json& content = root["content"]; const std::string msg_type = root.value("msg_type", ""); DataCell::id_t parent_id; parent_id.id = header.value("parent_id", uint64_t(0)); if(header.value("parent_origin", "")=="client") parent_id.created_by_client=true; else parent_id.created_by_client=false; DataCell::id_t cell_id; cell_id.id = header["cell_id"].get(); if(header.value("cell_origin", "")=="client") cell_id.created_by_client=true; else cell_id.created_by_client=false; // std::cerr << "received cell with id " << cell_id.id << std::endl; // Determine if this refers to a special cell in the interactive console. 
if (interactive_cells.find(parent_id.id) != interactive_cells.end()) { interactive_cells.insert(cell_id.id); docthread->on_interactive_output(root); } else if (interactive_cells.find(cell_id.id) != interactive_cells.end()) { docthread->on_interactive_output(root); } else if (msg_type.find("csl_") == 0) { root["header"]["from_server"] = true; docthread->on_interactive_output(root); } else if (msg_type == "progress_update") { std::string msg = content.value("msg", "Idle"); int n = content.value("n", 0); int total = content.value("total", 0); // FIXME: do something with 'pulse': // int pulse = content.value("pulse", false); docthread->set_progress(msg, n, total); } else if(msg_type=="completed") { // std::cerr << "received completion of " << content["original"] << " -> " << content["completed"] << std::endl; // Finally, the action to add the output cell. std::string toadd=content["completed"].get(); if(toadd.size()>0) { toadd=toadd.substr(content["original"].get().size()); int pos=content["position"].get(); int alternative=content["alternative"].get(); std::shared_ptr action = std::make_shared(cell_id, pos, toadd, alternative); docthread->queue_action(action); } } else { try { bool finished = header["last_in_sequence"].get(); if (finished) { if(parent_id.id!=0) { // If this cell references variables, store them in the DataCell. if(content.count("variables")>0) { std::set vars; for(const auto& var: content["variables"]) vars.insert(var); std::shared_ptr action = std::make_shared(parent_id, vars); docthread->queue_action(action); } } cell_finished_running(parent_id); } if (content.count("output")>0 && content["output"].get().size() > 0) { if (msg_type == "output") { std::string output = "\\begin{verbatim}" + content["output"].get() + "\\end{verbatim}"; // Stick an AddCell action onto the stack. We instruct the // action to add this result output cell as a child of the // corresponding input cell. 
DataCell result(cell_id, DataCell::CellType::output, output); // Finally, the action to add the output cell. std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); } else if (msg_type == "verbatim") { std::string output = "\\begin{verbatim}" + content["output"].get() + "\\end{verbatim}"; // Stick an AddCell action onto the stack. We instruct the // action to add this result output cell as a child of the // corresponding input cell. DataCell result(cell_id, DataCell::CellType::verbatim, output); // Finally, the action to add the output cell. std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); } else if (msg_type == "latex_view") { // std::cerr << "received latex cell " << content["output"].asString() << std::endl; DataCell result(cell_id, DataCell::CellType::latex_view, content["output"].get()); std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); } else if (msg_type == "input_form") { DataCell result(cell_id, DataCell::CellType::input_form, content["output"].get()); std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); } else if (msg_type == "error" || msg_type=="fault") { std::string error = "\\begin{verbatim}" + content["output"].get() + "\\end{verbatim}"; //if (msg_type == "fault") { // error = "Kernel fault\\begin{small}" + error + "\\end{small}"; // } // Stick an AddCell action onto the stack. We instruct the // action to add this result output cell as a child of the // corresponding input cell. DataCell result(cell_id, DataCell::CellType::error, error); // Finally, the action. std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); // Position the cursor in the cell that generated the error. 
All other cells on // the execute queue have been cancelled by the server. std::shared_ptr actionpos = std::make_shared(parent_id, ActionPositionCursor::Position::in); docthread->queue_action(actionpos); // Action has stopped, so mark all cells as non-running. all_cells_nonrunning(); } else if (msg_type == "image_png") { DataCell result(cell_id, DataCell::CellType::image_png, content["output"].get()); std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); } else if (msg_type == "image_svg") { DataCell result(cell_id, DataCell::CellType::image_svg, content["output"].get()); std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); } else if (msg_type == "slider") { DataCell result(cell_id, DataCell::CellType::slider, content["output"].get()); std::shared_ptr action = std::make_shared(result, parent_id, ActionAddCell::Position::child); docthread->queue_action(action); } else { std::cerr << "cadabra-client: received cell we did not expect: " << msg_type << ": " << content << std::endl; } } } catch (std::logic_error& ex) { // WARNING: if the server sends std::cerr << "cadabra-client: trouble processing server response: " << ex.what() << std::endl; } } // Update kernel busy indicator depending on number of running cells. 
if(number_of_cells_executing()>0) gui->on_kernel_runstatus(true); else gui->on_kernel_runstatus(false); gui->process_data(); } void ComputeThread::execute_interactive(uint64_t id, const std::string& code) { assert(gui_thread_id == std::this_thread::get_id()); if (!connection_is_open) return; if (code.substr(0, 7) == "reset()") return restart_kernel(); nlohmann::json req, header, content; header["msg_type"] = "execute_request"; header["cell_id"] = id; header["interactive"] = true; content["code"] = code.c_str(); req["auth_token"] = authentication_token; req["header"] = header; req["content"] = content; std::ostringstream oss; oss << req << std::endl; if(getenv("CADABRA_SHOW_SENT")) { std::cerr << "SENT: " << req.dump(3) << std::endl; } wsclient.send(oss.str()); interactive_cells.insert(id); } void ComputeThread::execute_cell(DTree::iterator it, std::string no_assign, std::vector output_cell_ids) { // This absolutely has to be run on the main GUI thread. assert(gui_thread_id==std::this_thread::get_id()); if(connection_is_open==false) return; const DataCell& dc=(*it); // std::cout << "cadabra-client: ComputeThread going to execute " << dc.textbuf << std::endl; if((it->textbuf).substr(0,7)=="reset()") { restart_kernel(); std::shared_ptr action = std::make_shared(it->id(), ActionPositionCursor::Position::next); docthread->queue_action(action); return; } // Position the cursor in the next cell so this one will not // accidentally get executed twice. This runs synchronously! std::shared_ptr actionpos = std::make_shared(it->id(), ActionPositionCursor::Position::next); docthread->queue_action(actionpos); gui->process_data(); // For a code cell, construct a server request message and then // send the cell to the server. if(it->cell_type==DataCell::CellType::python) { auto rit=running_cells.find(dc.id()); if(rit==running_cells.end()) running_cells[dc.id()]=1; else rit->second += 1; // Schedule an action to update the running status of this cell. 
std::shared_ptr rs_action = std::make_shared(it->id(), true); docthread->queue_action(rs_action); nlohmann::json req, header, content; header["uuid"]="none"; header["cell_id"]=dc.id().id; if(dc.id().created_by_client) header["cell_origin"]="client"; else header["cell_origin"]="server"; header["msg_type"]="execute_request"; header["output_cell_ids"]=output_cell_ids; req["auth_token"]=authentication_token; req["header"]=header; content["remove_variable_assignments"]=no_assign; content["code"]=dc.textbuf; req["content"]=content; gui->on_kernel_runstatus(true); std::ostringstream str; str << req << std::endl; if(getenv("CADABRA_SHOW_SENT")) { std::cerr << "SENT: " << req.dump(3) << std::endl; } wsclient.send(str.str()); // NOTE: we can get a return message in on_message at any point after this, // it will come in on a different thread! } // For a LaTeX cell, immediately request a new latex output cell to be displayed. if(it->cell_type==DataCell::CellType::latex) { // Stick an AddCell action onto the stack. We instruct the // action to add this result output cell as a child of the // corresponding input cell. 
DataCell result(DataCell::CellType::latex_view, it->textbuf); std::shared_ptr action = std::make_shared(result, it->id(), ActionAddCell::Position::child); docthread->queue_action(action); } } void ComputeThread::update_variable_on_server(std::string variable, double value) { nlohmann::json req, header, content; header["uuid"]="none"; header["cell_id"]=0; header["cell_origin"]="client"; header["msg_type"]="execute_request"; req["auth_token"]=authentication_token; req["header"]=header; content["code"]=variable + "=" + std::to_string(value); req["content"]=content; std::ostringstream str; str << req << std::endl; if(getenv("CADABRA_SHOW_SENT")) { std::cerr << "SENT: " << req.dump(3) << std::endl; } wsclient.send(str.str()); } int ComputeThread::number_of_cells_executing() const { return running_cells.size(); } void ComputeThread::stop() { if(connection_is_open==false) return; // std::cerr << "stopping existing kernel" << std::endl; nlohmann::json req, header, content; header["uuid"]="none"; header["msg_type"]="execute_interrupt"; req["auth_token"]=authentication_token; req["header"]=header; std::ostringstream str; str << req << std::endl; // std::cerr << str.str() << std::endl; server_pid=0; if(getenv("CADABRA_SHOW_SENT")) { std::cerr << "SENT: " << req.dump(3) << std::endl; } wsclient.send(str.str()); // Do not yet mark cells non-running, otherwise we are unable to // process any error messages. Do this once the stop comes through. // all_cells_nonrunning(); } void ComputeThread::restart_kernel() { if(connection_is_open==false) return; restarting_kernel=true; // Restarting the kernel means all previously running blocks have stopped running. // Inform the GUI about this. 
// FIXME: set all running flags to false gui->on_kernel_runstatus(false); // std::cerr << "cadabra-client: restarting kernel" << std::endl; nlohmann::json req, header, content; header["uuid"]="none"; header["msg_type"]="exit"; header["from_server"] = true; req["auth_token"]=authentication_token; req["header"]=header; std::ostringstream str; str << req << std::endl; // std::cerr << str.str() << std::endl; if(getenv("CADABRA_SHOW_SENT")) { std::cerr << "SENT: " << req.dump(3) << std::endl; } wsclient.send(str.str()); docthread->on_interactive_output(req); } bool ComputeThread::complete(DTree::iterator it, int pos, int alternative) { if(connection_is_open==false) return false; const DataCell& dc=(*it); nlohmann::json req, header, content; header["uuid"]="none"; header["cell_id"]=dc.id().id; if(dc.id().created_by_client) header["cell_origin"]="client"; else header["cell_origin"]="server"; header["msg_type"]="complete"; req["auth_token"]=authentication_token; req["header"]=header; std::string todo = it->textbuf.substr(0,pos); // if(todo.size()>0 && todo[todo.size()-1]=='\n') // todo=todo.substr(0, todo.size()-1); // std::cerr << "to complete full: " << todo << std::endl; size_t lst=todo.find_last_of("\n(){}[]:\t "); if(lst!=std::string::npos) todo=todo.substr(lst+1); // std::cerr << "to complete strip: " << todo << std::endl; if(todo.size()==0) return false; req["string"]=todo; req["position"]=pos; req["alternative"]=alternative; std::ostringstream str; str << req << std::endl; // std::cerr << str.str() << std::endl; server_pid=0; if(getenv("CADABRA_SHOW_SENT")) { std::cerr << "SENT: " << req.dump(3) << std::endl; } wsclient.send(str.str()); return true; } ================================================ FILE: client_server/ComputeThread.hh ================================================ #pragma once #include #include #include #include #include "websocket_client.hh" #include "DataCell.hh" namespace cadabra { class GUIBase; class DocumentThread; /// \ingroup 
clientserver /// /// Base class which talks to the server and sends Action objects back to the /// DocumentThread. /// /// ComputeThread is the base class which takes care of doing actual /// computations with the cells in a document. It handles talking to /// the server backend. It knows how to pass cells to the server and /// ask them to be executed. Results are reported back to the GUI by /// putting ActionBase objects onto its todo stack. ComputeThread never /// directly modifies the document tree. class ComputeThread { public: /// If the ComputeThread is constructed with a null pointer to the /// gui, there will be no gui updates, just DTree updates. ComputeThread(int server_port=0, std::string token="", std::string ip_address="127.0.0.1"); ComputeThread(const ComputeThread& )=delete; // You cannot copy this object ~ComputeThread(); /// Determine the objects that this compute thread should be /// talking to. void set_master(GUIBase *, DocumentThread *); /// Main entry point, which will connect to the server and /// then start an event loop to handle communication with the /// server. Only terminates when the connection drops, so run /// your GUI on a different thread. void run(); /// In order to execute code on the server, call the /// following from the GUI thread. This method returns as /// soon as the request has been put on the network queue. If /// no communication with the server is necessary, this /// returns immediately. The ComputeThread will report the /// result of the computation/processing by adding actions to /// the DocumentThread owned pending_actions stack, by /// calling queue_action. It will never modify the cell /// directly, and will also never modify any other cells in /// the document tree. void execute_cell(DTree::iterator, std::string no_assign="", std::vector output_cell_ids=std::vector() ); void execute_interactive(uint64_t id, const std::string& code); /// Update a variable in the kernel. 
This does essentially the /// same thing as execute_cell, but will not refer to any /// cell as it creates the code itself. void update_variable_on_server(std::string variable, double value); /// Stop the current cell execution on the server and remove /// all other cells from the run queue as well. void stop(); /// Restart the kernel. void restart_kernel(); /// Request completion of a string. Returns `false` if the current /// cell cannot be completed, in which case the TAB which led to /// the request should be interpreted literally and used for spacing. /// The `alternative` argument is the serial number of the requested /// completion, in case there is more than one possible completion. bool complete(DTree::iterator, int pos, int alternative); // Determine if there are still cells running on the server. // FIXME: this does not guarantee thread-safety but at the moment // is only used for updating status bars etc. // FIXME: can be moved to DocumentThread. int number_of_cells_executing(void) const; /// Terminate the compute thread, in preparation for shutting /// down the client altogether. void terminate(); /// Return the status of the connection to the kernel. bool kernel_is_connected() const; private: GUIBase *gui; DocumentThread *docthread; // For debugging purposes, we keep record of the gui thread id, // so that we can flag when code runs on the wrong thread. // Gets initialised in the ComputeThread constructor. std::thread::id gui_thread_id; // Keeping track of cells which are running on the server, in // a form which allows us to look them up quickly based only // on the id (which is all that the server knows about). A // cell can be queued multiple times for running; this is the // integer in the map. std::map running_cells; // WebSocket++ things. 
websocket_client wsclient; bool connection_is_open, restarting_kernel; void init(); void try_connect(); void try_spawn_server(); void on_open(); void on_fail(const boost::beast::error_code&); void on_close(); void on_message(const std::string& msg); void cell_finished_running(DataCell::id_t); /// Set all cells to be non-running (e.g. after a kernel failure) and /// report the status of each cell to the GUI. void all_cells_nonrunning(); std::set interactive_cells; // Self-started server Glib::Pid server_pid; int server_stdout, server_stderr; unsigned short port; std::string authentication_token; int forced_server_port; std::string forced_server_token; std::string forced_server_ip_address; }; } ================================================ FILE: client_server/DocumentThread.cc ================================================ #include "Actions.hh" #include "DocumentThread.hh" #include "GUIBase.hh" #include "ComputeThread.hh" #include "CdbPython.hh" #include #include #include #include #include //#include #include #include #ifndef EMSCRIPTEN #include #include "Snoop.hh" #endif #include "Config.hh" using namespace cadabra; DocumentThread::DocumentThread(GUIBase* g) : gui(g) , compute(0) , current_cell(doc.end()) , follow_mode(false) , follow_cell(doc.end()) , follow_last_cell(doc.end()) , disable_stacks(false) { // Setup logging. std::string version=std::string(CADABRA_VERSION_SEM); #ifndef EMSCRIPTEN snoop::log.init("Cadabra", version, "log.cadabra.science"); snoop::log.set_sync_immediately(true); #endif // snoop::log(snoop::warn) << "Starting" << snoop::flush; } void DocumentThread::on_interactive_output(const nlohmann::json& ) { } void DocumentThread::set_progress(const std::string& msg, int cur_step, int total_steps) { } void DocumentThread::set_compute_thread(ComputeThread *cl) { compute = cl; } void DocumentThread::new_document() { // Setup a single-cell document. This operation itself cannot be undone, // so we do it directly on the doc, not using Actions. 
DataCell top(DataCell::CellType::document); DTree::iterator doc_it = doc.set_head(top); gui->add_cell(doc, doc_it, false); // One Python input cell in the empty document. DataCell one(DataCell::CellType::python, ""); DTree::iterator one_it = doc.append_child(doc_it, one); gui->add_cell(doc, one_it, false); // Put a 'position cursor' action on the stack to be executed as // soon as the GUI is up. std::shared_ptr actionpos = std::make_shared(one_it->id(), ActionPositionCursor::Position::in); queue_action(actionpos); } void DocumentThread::load_from_string(const std::string& json) { std::lock_guard guard(stack_mutex); pending_actions=std::queue >(); doc.clear(); JSON_deserialise(json, doc); gui->remove_all_cells(); build_visual_representation(); } void DocumentThread::undo() { stack_mutex.lock(); if(undo_stack.size()==0) { //std::cerr << "no entries left on the stack" << std::endl; stack_mutex.unlock(); return; } disable_stacks=true; auto ua = undo_stack.top(); //std::cerr << "Undo action " << typeid(*ua).name() << std::endl; redo_stack.push(ua); undo_stack.pop(); // std::cerr << "DocumentThread::undo: undo_stack.size() == " << undo_stack.size() << std::endl; ua->revert(*this, *gui); disable_stacks=false; stack_mutex.unlock(); } void DocumentThread::redo() { stack_mutex.lock(); if(redo_stack.size()==0) { //std::cerr << "no entries left on the stack" << std::endl; stack_mutex.unlock(); return; } disable_stacks=true; auto ua = redo_stack.top(); //std::cerr << "Undo action " << typeid(*ua).name() << std::endl; undo_stack.push(ua); redo_stack.pop(); ua->execute(*this, *gui); disable_stacks=false; stack_mutex.unlock(); } void DocumentThread::build_visual_representation() { // Because the add_cell method figures out by itself where to generate the VisualCell, // we only have feed all cells in turn. 
DTree::iterator doc_it=doc.begin(); while(doc_it!=doc.end()) { // std::cout << "ADDING:" << doc_it->textbuf << std::endl; gui->add_cell(doc, doc_it, false); ++doc_it; } } //const DTree& DocumentThread::dtree() // { // return doc; // } template struct ci_equal { bool operator()(charT ch1, charT ch2) { return std::toupper(ch1) == std::toupper(ch2); } }; template int ci_find_substr( const T& str1, const T& str2, int start_pos ) { auto start=str1.begin(); start+=start_pos; typename T::const_iterator it = std::search( start, str1.end(), str2.begin(), str2.end(), ci_equal() ); if ( it != str1.end() ) return it - str1.begin(); else return -1; } std::pair DocumentThread::find_string(DTree::iterator start_it, size_t start_pos, const std::string& f, bool case_ins) const { // std::cerr << "finding from pos " << start_pos << ", " << &(*start_it) << ": " << start_it->textbuf.substr(0,30) << std::endl; DTree::iterator doc_it=start_it; while(doc_it!=doc.end()) { // std::cout << doc_it->textbuf << std::endl; // FIXME: re-enable searching in output cells. 
if(doc_it->hidden==false && (doc_it->cell_type==DataCell::CellType::python || doc_it->cell_type==DataCell::CellType::latex)) { size_t pos; if(case_ins) pos = ci_find_substr(doc_it->textbuf, f, start_pos); else pos = doc_it->textbuf.find(f, start_pos); if(pos!=std::string::npos) return std::make_pair(doc_it, pos); } start_pos=0; // after one fail, start next cell at zero ++doc_it; } return std::make_pair(doc.end(), std::string::npos); } void DocumentThread::queue_action(std::shared_ptr ab) { std::lock_guard guard(stack_mutex); pending_actions.push(ab); } void DocumentThread::run_all_cells() { follow_mode=true; DTree::sibling_iterator sib=doc.begin(doc.begin()); while(sib!=doc.end(doc.begin())) { if(sib->cell_type==DataCell::CellType::python) { run_cell(DTree::iterator(sib), false); follow_last_cell=DTree::iterator(sib); } ++sib; } } void DocumentThread::run_cell(DTree::iterator it, bool shift_pressed) { // First ensure that this cell is not already running, otherwise all hell // will break loose when we try to double-remove the existing output cell etc. if(it->running) return; // Ensure this cell is not empty either. if(it->textbuf.size()==0) return; // Remove child nodes, if any. // FIXME: Does it make more sense to do this only after the // execution result comes back from the server? DTree::sibling_iterator sib=doc.begin(it); gui->dim_output_cells(it); while(sib!=doc.end(it)) { // std::cout << "cadabra-client: scheduling output cell for removal: " << sib->id().id << std::endl; std::shared_ptr action = std::make_shared(sib->id()); queue_action(action); ++sib; } // Since the user has initiated this cell execution, we can // turn on cell follow mode. follow_cell=it; follow_mode=true; // Execute the cell. // std::cerr << "Executing cell " << it->id().id << std::endl; // If this is a LaTeX input cell, and auto-close is turned on, close // the input cell. Make sure to also feed that into the document // itself! 
if(it->cell_type==DataCell::CellType::latex) if(prefs.auto_close_latex) gui->hide_visual_cells(it); // Execute the cell. Make sure this comes after the hiding logic above. compute->execute_cell(it); } void DocumentThread::run_cells_referencing_variable(std::string variable, double value) { // First update the variable itself. compute->update_variable_on_server(variable, value); // Re-run all cells referencing this variable. follow_mode=false; DTree::iterator it = doc.begin(); while(it!=doc.end()) { if(it->cell_type==DataCell::CellType::python) { if(it->variables_referenced.count(variable)==1) { if(it->textbuf.find("slider(")==std::string::npos) { // We have found a cell which depends on the variable. // Collect the cell_id's of the current output cells, // so that we can re-use these. std::vector output_cell_ids; DTree::iterator sib = doc.begin(it); while(sib != doc.end(it)) { output_cell_ids.push_back(sib->id().id); ++sib; } // Now execute. compute->execute_cell(it, variable, output_cell_ids); } } } ++it; } } void DocumentThread::process_action_queue() { // This routine *absolutely* has to be run on the main GUI thread. Anything // else is a bug. if(main_thread_id != std::this_thread::get_id()) std::cerr << "INTERNAL ERROR: DocumentThread::process_action_queue not running on main thread." << std::endl; stack_mutex.lock(); while(pending_actions.size()>0) { // std::cerr << "pending_actions.size() == " << pending_actions.size() << std::endl; std::shared_ptr ab = pending_actions.front(); // Unlock the action queue while we are processing this particular action, // so that other actions can be added which we run. stack_mutex.unlock(); // std::cerr << "Executing action " << typeid(*ab).name() << " for " << ab->ref_id.id << std::endl; // Execute the action; this will run synchronously, so after // this returns the doc and visual representation have both been // updated. 
try { ab->execute(*this, *gui); } catch (const std::exception& err) { on_unhandled_error(err); } // Lock the queue to remove the action just executed, and // add it to the undo stack. stack_mutex.lock(); if(ab->undoable()) undo_stack.push(ab); if(ab->callback != nullptr) ab->callback(); if(pending_actions.size()>0) // some actions clear the queue pending_actions.pop(); } stack_mutex.unlock(); } bool DocumentThread::on_unhandled_error(const std::exception& err) { return true; } DocumentThread::Prefs::Prefs(bool use_defaults) { #ifndef EMSCRIPTEN config_path=std::string(Glib::get_user_config_dir()) + "/cadabra2.conf"; try { if (!use_defaults) { std::ifstream f(config_path); if (f) { try { f >> data; } catch(nlohmann::json::exception& ex) { std::cerr << "Config file " << config_path << " is not JSON; ignoring." << std::endl; data = nlohmann::json::object(); } } else { data = nlohmann::json::object(); // Backwards compatibility, check to see if cadabra.conf exists // and if so take the is_registered variable from there std::ifstream old_f(std::string(Glib::get_user_config_dir()) + "/cadabra.conf"); if (old_f) { std::string line; while (old_f.good()) { std::getline(old_f, line); if (line.find("registered=true") != std::string::npos) { data["is_registered"] = true; break; } } } } } } catch(std::exception& ex) { data = nlohmann::json::object(); } font_step = data.value("font_step", 0); highlight = data.value("highlight", false); is_registered = data.value("is_registered", false); is_anonymous = data.value("is_anonymous", false); git_path = data.value("git_path", ""); python_path = data.value("python_path", ""); move_into_new_cell = data.value("move_into_new_cell", false); tab_completion = data.value("tab_completion", true); microtex = data.value("microtex", true); auto_close_latex = data.value("auto_close_latex", true); // Force microtex when this is an AppImage. const char *appdir = getenv("APPDIR"); if(appdir) microtex=true; // Force microtex when we are on Windows. 
// --- continuation of DocumentThread::Prefs::Prefs(bool use_defaults) ---
#if(_WIN32)
	microtex = true;
#endif
	if(git_path=="")
		git_path="/usr/bin/git";

	// Get the colours for syntax highlighting.
	if(data.count("colours")==0)
		data["colours"]={
			{"python", nlohmann::json::object() },
			{"latex", nlohmann::json::object() }
			};

	const auto& python_colours = data["colours"]["python"];
	colours["python"]["keyword"]   = python_colours.value("keyword", "RoyalBlue");
	colours["python"]["operator"]  = python_colours.value("operator", "SlateGray");
	colours["python"]["brace"]     = python_colours.value("brace", "SlateGray");
	colours["python"]["string"]    = python_colours.value("string", "ForestGreen");
	colours["python"]["comment"]   = python_colours.value("comment", "Silver");
	colours["python"]["object"]    = python_colours.value("object", "DarkGray");
	colours["python"]["number"]    = python_colours.value("number", "Sienna");
	colours["python"]["maths"]     = python_colours.value("maths", "Olive");
	colours["python"]["function"]  = python_colours.value("function", "FireBrick");
	colours["python"]["decorator"] = python_colours.value("decorator", "DarkViolet");
	colours["python"]["class"]     = python_colours.value("class", "MediumOrchid");

	const auto& latex_colours = data["colours"]["latex"];
	colours["latex"]["command"]   = latex_colours.value("command", "rgb(52,101,164)");
	// NOTE(review): reads config key "brace" but stores under "parameter";
	// looks like a copy-paste slip -- confirm the intended config key.
	colours["latex"]["parameter"] = latex_colours.value("brace", "rgb(245,121,0)");
	colours["latex"]["comment"]   = latex_colours.value("comment", "Silver");
	colours["latex"]["maths"]     = latex_colours.value("maths", "Sienna");
#endif
	}

/// Write the current preference values back to the JSON config file.
/// Failure to open the file is reported on stderr but is not fatal.
void DocumentThread::Prefs::save()
	{
	std::ofstream f(config_path);
	if (f) {
		data["font_step"] = font_step;
		data["highlight"] = highlight;
		data["is_registered"] = is_registered;
		data["is_anonymous"] = is_anonymous;
		data["python_path"] = python_path;
		data["move_into_new_cell"] = move_into_new_cell;
		data["tab_completion"] = tab_completion;
		data["microtex"] = microtex;
		data["auto_close_latex"] = auto_close_latex;
		for (const auto& lang : colours) {
			for (const auto& kw : lang.second)
				data["colours"][lang.first][kw.first] = kw.second;
			}
		data["git_path"] = git_path;
		f << data << '\n';
		}
	else
		std::cerr << "Warning: could not write to config file\n";
	}

/// Log user registration details to the snoop logging server (no-op in
/// EMSCRIPTEN builds).
void DocumentThread::set_user_details(const std::string& name, const std::string& email, const std::string& affiliation)
	{
#ifndef EMSCRIPTEN
	snoop::log("name") << name << snoop::flush;
	snoop::log("email") << email << snoop::flush;
	snoop::log("affiliation") << affiliation << snoop::flush;
#endif
	}

/// Determine, from the text before and after the cursor, whether the user
/// is asking for help on a property, an algorithm or a LaTeX/reserved node
/// name, and extract the topic string.
/// NOTE(review): the body is truncated below -- the text extraction ate the
/// remainder of this function together with the DocumentThread.hh file
/// header which follows.
bool DocumentThread::help_type_and_topic(const std::string& before, const std::string& after, help_t& help_type, std::string& help_topic) const
	{
	help_t objtype=help_t::algorithm;
	if(! (before.size()==0 && after.size()==0) ) {
		// We provide help for properties, algorithms and reserved node
		// names. Properties are delimited to the left by '::' and to
		// the right by anything non-alnum. Algorithms are delimited to
		// the left by non-alnum except '_' and to the right by '('. Reserved node
		// names are TeX symbols, starting with '\'.
		//
		// So scan the 'before' string for a left-delimiter and the 'after' string
		// for a right-delimiter.
		int lpos=before.size()-1;
		while(lpos>=0) {
			if(before[lpos]==':' && lpos>0 && before[lpos-1]==':') {
				objtype=help_t::property;
				break;
				}
			if(before[lpos]=='\\') {
				objtype=help_t::latex;
				break;
				}
			if(isalnum(before[lpos])==0 && before[lpos]!='_') {
				objtype=help_t::algorithm;
				break;
				}
			--lpos;
			}
		if(objtype==help_t::none) return false;
		++lpos;

		size_t rpos=0;
		while(rpos

// NOTE(review): the text extraction truncated help_type_and_topic at the
// line above and stripped the angle-bracket header names below
// (DocumentThread.hh starts here).
#include
#include
#include
#include
#include
#include "DataCell.hh"
#include "tree.hh"

namespace cadabra {

	class ActionBase;
	class ActionAddCell;
	class ActionPositionCursor;
	class ActionRemoveCell;
	class ActionSetRunStatus;
	class ActionSplitCell;
	class ActionInsertText;
	class ActionCompleteText;
	class ActionEraseText;

	class ComputeThread;
	class GUIBase;

	/// \ingroup clientserver
	///
	/// A base class with all the logic to manipulate a Cadabra
	/// notebook document.
	/// Relies on the various objects derived from
	/// ActionBase in order to get actual work done. All methods here
	/// will always run on the GUI thread.
	///
	/// In order to implement a GUI, derive from both DocumentThread
	/// and GUIBase and then implement the virtual functions of the
	/// latter (those implement basic insertion/removal of notebook
	/// cells; the logic to figure out which ones and to implement the
	/// undo/redo stack is all in the GUI-agnostic DocumentThread).

	class DocumentThread {
		public:
			DocumentThread(GUIBase *);

			/// It is not possible to copy-construct a DocumentThread as
			/// it holds on to resources which are not easily copied
			/// (such as GUI elements).
			DocumentThread(const DocumentThread&)=delete;

			/// Let the notebook know about the ComputeThread so that it
			/// can send cells for evaluation. Notebook does NOT own this
			/// pointer.
			virtual void set_compute_thread(ComputeThread *);

			/// Ensure that the gui has an up-to-date representation of the
			/// dtree. Traverses the entire tree so is expensive to run and
			/// should only be used when loading new documents or creating
			/// a new canvas view.
			void build_visual_representation();

			/// All changes to the document should be made by submitting
			/// ActionBase derived objects to the 'queue_action' function,
			/// so that an undo stack can be kept. They are then processed
			/// by calling the 'process_action_queue' method (only
			/// available from this thread).
			/// NOTE(review): the shared_ptr template argument was stripped
			/// by the text extraction.
			void queue_action(std::shared_ptr);

			/// Run all cells in the document. Only execute this on the
			/// main (GUI) thread!
			void run_all_cells();

			/// Run a single cell in the document. Only execute this on the
			/// main (GUI) thread!
			void run_cell(DTree::iterator, bool shift_pressed);

			/// Run all cells containing references to a given variable.
			/// This will eventually call compute->execute on the
			/// relevant cells, but is part of DocumentThread because
			/// this function takes care of scanning through the document
			/// to find out *which* cells to run.
			void run_cells_referencing_variable(std::string variable, double value);

			/// Setup an empty new document with a single Python input cell.
			void new_document();

			/// Load a new notebook from a JSON string. Should only be called
			/// from the GUI thread. Will cancel any pending operations on the
			/// existing notebook (if present) first.
			virtual void load_from_string(const std::string&);

			virtual void on_interactive_output(const nlohmann::json& msg);

			virtual void set_progress(const std::string& msg, int cur_step, int total_steps);

			/// One undo step.
			void undo();

			/// One redo step.
			void redo();

			/// Find string, return match, or a (doc.end(), std::string::npos).
			/// NOTE(review): pair template arguments stripped by the extraction.
			std::pair find_string(DTree::iterator start_it, size_t start_pos, const std::string& f, bool case_ins) const;

			/// All actions are considered pieces of code which are part of
			/// DocumentThread, so they should be able to access its private
			/// variables.
			friend ActionBase;
			friend ActionAddCell;
			friend ActionPositionCursor;
			friend ActionRemoveCell;
			friend ActionSplitCell;
			friend ActionSetRunStatus;
			friend ActionInsertText;
			friend ActionCompleteText;
			friend ActionEraseText;

			/// User-configurable preferences, loaded from and saved to a
			/// per-user JSON config file (see Prefs::Prefs and Prefs::save
			/// in DocumentThread.cc).
			class Prefs {
				public:
					// NOTE(review): map template arguments stripped by the extraction.
					using ColorMap = std::map;

					Prefs(bool use_defaults = false);
					void save();

					int font_step;
					bool highlight;
					bool is_registered;
					bool is_anonymous;
					std::map colours;
					std::string git_path;
					std::string python_path;
					bool move_into_new_cell;
					bool tab_completion;
					bool microtex;
					bool auto_close_latex;

				private:
					nlohmann::json data;
					std::string config_path;
				};

			Prefs prefs;

			/// Set user details which will be sent to the Cadabra log
			/// server.
			void set_user_details(const std::string& name, const std::string& email, const std::string& affiliation);

			/// For debugging purposes we store the main thread id here (the
			/// main Cadabra class sets it).
			std::thread::id main_thread_id;

		protected:
			GUIBase       *gui;
			ComputeThread *compute;

			/// The actual document tree.
			/// This object is only modified on
			/// the GUI thread, either directly by code in
			/// DocumentThread, or by code in the various objects derived
			/// from ActionBase. In particular, ComputeThread cannot
			/// access this tree directly.
			DTree doc;

			/// Iterator to the currently active cell in the notebook, or
			/// doc.end() if nothing active.
			DTree::iterator current_cell;

			/// If `follow_mode` is true, the `follow_cell` can be set,
			/// and this will make the canvas view follow the indicated
			/// cell.
			bool follow_mode;
			DTree::iterator follow_cell, follow_last_cell;

			/// The action undo/redo/todo stacks and logic to execute
			/// them. These stacks can be accessed from both the
			/// DocumentThread as well as the ComputeThread (the latter
			/// does it through the DocumentThread::queue_action method),
			/// so they need a mutex to access them.
			/// NOTE(review): stack/queue template arguments below were
			/// stripped by the text extraction.
			std::mutex stack_mutex;
			typedef std::stack > ActionStack;
			ActionStack undo_stack, redo_stack;
			std::queue > pending_actions;
			bool disable_stacks;

			/// Process the action queue. It is allowed to call queue_action() above
			/// while this is running. So a running action can add more actions.
			/// Needs to be called on the GUI thread!
			void process_action_queue();

			virtual bool on_unhandled_error(const std::exception& err);

			/// Help system
			enum class help_t { algorithm, property, latex, none };
			bool help_type_and_topic(const std::string& before, const std::string& after, help_t& help_type, std::string& help_topic) const;
		};

	}



================================================
FILE: client_server/GUIBase.hh
================================================
#pragma once

// NOTE(review): the angle-bracket header name below was stripped by the
// text extraction.
#include
#include "DataCell.hh"

namespace cadabra {

	/// \ingroup clientserver
	///
	/// Abstract base class with methods that need to be implemented
	/// by any GUI. You need to derive from this class as well as from
	/// the DocumentThread class.

	class GUIBase {
		public:
			/// The basic manipulations that a GUI needs to implement are
			/// adding, removing and updating (refreshing the display of)
			/// a cell. The code in DocumentThread will call these to make
			/// the GUI update its display. Called on the document thread.
			virtual void update_cell(const DTree&, DTree::iterator)=0;

			/// Remove a single cell together with all its child cells.
			/// Some toolkits (e.g. Gtk) will take care of that entire
			/// child tree removal automatically, in which case the only
			/// thing that needs done for the child cells is to remove
			/// any reference to their VisualCells.
			virtual void remove_cell(const DTree&, DTree::iterator)=0;

			/// Remove all GUI cells from the display (used as a quick way
			/// to clear all before loading a new document).
			virtual void remove_all_cells()=0;

			/// Add a GUI cell corresponding to the document cell at the
			/// iterator. The GUI needs to figure out from the location of
			/// this cell in the DTree where to insert the cell in the visual
			/// display. If the 'visible' flag is false, hide the cell from
			/// view independent of whether its hidden flag is set (this
			/// is only used when constructing a document on load time and
			/// we do not want to show cells until they have all been added
			/// to the document).
			virtual void add_cell(const DTree&, DTree::iterator, bool visible)=0;

			/// Position the cursor in the current canvas in the widget
			/// corresponding to the indicated cell.
			virtual void position_cursor(const DTree&, DTree::iterator, int)=0;

			/// Retrieve the position of the cursor in the current cell.
			virtual size_t get_cursor_position(const DTree&, DTree::iterator)=0;

			/// Hide all visual cells (on all canvasses) corresponding to
			/// a given document cell.
			virtual void hide_visual_cells(DTree::iterator)=0;

			/// Dim all output cells of the visual cells corresponding to the
			/// given document cell.
			virtual void dim_output_cells(DTree::iterator)=0;

			/// Network status is propagated from the ComputeThread to the
			/// GUI using the following methods. These get called on the
			/// compute thread (as opposed to the functions above, which get
			/// called on the gui thread).
			//@{
			virtual void on_connect()=0;
			virtual void on_disconnect(const std::string& reason)=0;
			virtual void on_network_error()=0;
			virtual void on_kernel_runstatus(bool)=0;
			//@}

			/// When the ComputeThread needs to modify the document, it
			/// stores an ActionBase object on the stack (see the
			/// DocumenThread class) and then wakes up the GUI thread
			/// signalling it to process this action. The following member
			/// should wake up the GUI thread and make it enter the
			/// processing part. You can call this from *any* thread.
			virtual void process_data()=0;
		};

	};



================================================
FILE: client_server/ScriptThread.cc
================================================
#include "ScriptThread.hh"
#include "DocumentThread.hh"
#include "GUIBase.hh"
#include "Actions.hh"
// NOTE(review): the angle-bracket header names below were stripped by the
// text extraction (boost uuid and standard headers).
#include  // generators
#include  // streaming operators etc.
#include
#include
#include
#include "nlohmann/json.hpp"

using namespace cadabra;

/// Construct the scripting thread: generate a random authentication token
/// and keep url_mutex locked until run() has determined the port.
ScriptThread::ScriptThread(DocumentThread *d, GUIBase *g)
	: document(d), gui(g), local_port(0)
	{
	// Lock the URL (port and auth token) until the thread is
	// properly spun up.
	url_mutex.lock();

	boost::uuids::uuid authentication_uuid = boost::uuids::random_generator()();
	authentication_token = boost::uuids::to_string( authentication_uuid );
	}

ScriptThread::~ScriptThread()
	{
	url_mutex.unlock();
	}

/// Connection-opened callback; connection bookkeeping is currently disabled.
void ScriptThread::on_open(websocket_server::id_type ws_id)
	{
	std::cerr << "on_open" << std::endl;
	// std::lock_guard lock(ws_mutex);
	// Connection con;
	// con.ws_id = ws_id;
	//
	// snoop::log(snoop::info) << "Connection " << con.uuid << " open."
	//                         << snoop::flush;
	// connections[ws_id]=con;
	}

/// Connection-closed callback; connection bookkeeping is currently disabled.
void ScriptThread::on_close(websocket_server::id_type ws_id)
	{
	std::cerr << "on_close" << std::endl;
	// std::lock_guard lock(ws_mutex);
	//
	// auto it = connections.find(hdl);
	// // snoop::log(snoop::info) << "Connection " << it->second.uuid << " close." << snoop::flush;
	// connections.erase(ws_id);
	//
	// if(exit_on_disconnect)
	//    exit(-1);
	}

/// Handle an incoming scripting command. The request path (minus leading
/// slash) must equal the authentication token; unauthenticated messages
/// are silently dropped. Recognised actions are queued as ActionBase
/// objects because this code does not run on the main thread.
void ScriptThread::on_message(websocket_server::id_type ws_id, const std::string& msg,
                              const websocket_server::request_type& req, const std::string& ip_address)
	{
	// std::cerr << "received: " << msg << std::endl;
	if(req.target().substr(1) != authentication_token) {
		// Unauthorised.
		return;
		}

	try {
		auto jmsg = nlohmann::json::parse(msg);
		std::cerr << "received message: " << jmsg.dump(3) << std::endl;

		std::string msg_action = jmsg.value("action", "");
		size_t      msg_serial = jmsg.value("serial", 0);

		if(msg_action=="run_all_cells") {
			// We cannot call directly into the document methods here,
			// because we are not on the main thread. So we queue an
			// action, to be dispatched later.
			// --- continuation of ScriptThread::on_message ---
			// NOTE(review): the shared_ptr/make_shared template arguments in
			// this function were stripped by the text extraction.
			std::shared_ptr action = std::make_shared();
			action->callback = [this, ws_id, msg_serial, msg_action]() {
				nlohmann::json msg;
				msg["status"]="completed";
				msg["serial"]=msg_serial;
				msg["action"]=msg_action;
				wserver.send(ws_id, msg.dump());
				};
			document->queue_action(action);
			gui->process_data();
			}
		else if(msg_action=="run_cell") {
			uint64_t msg_cell_id = jmsg.value("cell_id", uint64_t(0));
			DataCell::id_t id;
			id.id=msg_cell_id;
			std::shared_ptr action = std::make_shared(id);
			action->callback = [this, ws_id, msg_serial, msg_action]() {
				nlohmann::json msg;
				msg["status"]="completed";
				msg["serial"]=msg_serial;
				msg["action"]=msg_action;
				wserver.send(ws_id, msg.dump());
				};
			document->queue_action(action);
			gui->process_data();
			}
		else if(msg_action=="open") {
			std::string notebook = jmsg.value("notebook", "");
			std::shared_ptr action = std::make_shared(notebook);
			action->callback = [this, ws_id, msg_serial, msg_action]() {
				nlohmann::json msg;
				msg["status"]="completed";
				msg["serial"]=msg_serial;
				msg["action"]=msg_action;
				wserver.send(ws_id, msg.dump());
				};
			document->queue_action(action);
			gui->process_data();
			}
		else if(msg_action=="insert_text") {
			std::string content = jmsg.value("content", "");
			DataCell::id_t id;
			id.id=0;
			std::shared_ptr action = std::make_shared(id, 0, content);
			action->callback = [this, ws_id, msg_serial, msg_action]() {
				nlohmann::json msg;
				msg["status"]="completed";
				msg["serial"]=msg_serial;
				msg["action"]=msg_action;
				wserver.send(ws_id, msg.dump());
				};
			document->queue_action(action);
			gui->process_data();
			}
		else if(msg_action=="add_cell") {
			std::string content = jmsg.value("content", "");
			uint64_t cell_id = jmsg.value("cell_id", uint64_t(0));
			DataCell::id_t this_id;
			if(cell_id!=0)
				this_id.id=cell_id;
			DataCell dc(this_id, DataCell::CellType::python, content);
			DataCell::id_t ref_id;
			ref_id.id=0; // relative to current cell
			std::shared_ptr action = std::make_shared(dc, ref_id, ActionAddCell::Position::before, true);
			// Build the reply before queueing so the callback can capture it.
			nlohmann::json reply_msg;
			reply_msg["status"]="completed";
			reply_msg["serial"]=msg_serial;
			reply_msg["action"]=msg_action;
			reply_msg["cell_id"]=dc.id().id;
			action->callback = [this, ws_id, reply_msg]() {
				wserver.send(ws_id, reply_msg.dump());
				};
			document->queue_action(action);
			gui->process_data();
			}
		}
	catch(nlohmann::json::exception& ex) {
		std::cerr << "Received unparsable message: " << msg << std::endl;
		}
	}

/// Main loop of the scripting thread: bind the handlers, listen on an
/// OS-assigned port (listen(0)), publish the port by unlocking url_mutex,
/// then serve until stopped.
void ScriptThread::run()
	{
	wserver.set_message_handler(std::bind(&ScriptThread::on_message, this,
	                                      std::placeholders::_1, std::placeholders::_2,
	                                      std::placeholders::_3, std::placeholders::_4));
	wserver.set_connect_handler(std::bind(&ScriptThread::on_open, this, std::placeholders::_1));
	wserver.set_disconnect_handler(std::bind(&ScriptThread::on_close, this, std::placeholders::_1));

	wserver.listen(0);
	local_port = wserver.get_local_port();
	url_mutex.unlock();
	wserver.run();
	}

/// Return the port the websocket server listens on; blocks until run()
/// has published it (url_mutex).
uint16_t ScriptThread::get_local_port() const
	{
	std::lock_guard guard(url_mutex);
	return local_port;
	}

/// Return the authentication token; blocks until run() has published it.
std::string ScriptThread::get_authentication_token() const
	{
	std::lock_guard guard(url_mutex);
	return authentication_token;
	}

void ScriptThread::terminate()
	{
	wserver.stop();
	}



================================================
FILE: client_server/ScriptThread.hh
================================================
#pragma once

#include "websocket_server.hh"

namespace cadabra {

	/// \ingroup scripting
	///
	/// Class containing all functionality which allows users to
	/// control a running notebook client by sending it commands
	/// via a websocket port. This can be used to script its
	/// operations.
	///
	/// This runs as a separate thread, so it is neither on the
	/// main thread (GUI, DocumentThread) nor on the thread that
	/// handles communication with the cadabra-server.
	class DocumentThread;
	class GUIBase;

	class ScriptThread {
		public:
			ScriptThread(DocumentThread *, GUIBase *);
			~ScriptThread();
			ScriptThread(const ScriptThread&) = delete;

			/// Main loop of the scripting server; blocks until terminate().
			void run();
			void terminate();

			uint16_t get_local_port() const;
			std::string get_authentication_token() const;

		private:
			void on_message(websocket_server::id_type id, const std::string& msg,
			                const websocket_server::request_type& req, const std::string& ip_address);
			void on_open(websocket_server::id_type id);
			void on_close(websocket_server::id_type id);

			websocket_server wserver;
			DocumentThread  *document;
			GUIBase         *gui;

			// Authentication token, needs to be sent along with any message.
			// Gets shown when we start up.
			mutable std::mutex url_mutex;
			std::string authentication_token;
			uint16_t local_port;
		};

	}



================================================
FILE: client_server/Server.cc
================================================
// NOTE(review): several angle-bracket include names and template arguments
// in this file were stripped by the text extraction.
#include
#include "Server.hh"
#include "InstallPrefix.hh"
#include
#include
#include
#include
#include
#include  // generators
#include  // streaming operators etc.
#include
#include
#include
#include "Config.hh"
//#ifndef ENABLE_JUPYTER
//#include "Snoop.hh"
//#endif
#include "CdbPython.hh"
#include "SympyCdb.hh"
#include "pythoncdb/py_helpers.hh"

// #define DEBUG 1

// Flag read by the (currently disabled) InterruptCheck trace hook.
bool interrupt_block=false;

Server::Server()
// : return_cell_id(std::numeric_limits::max()/2)
	{
	boost::uuids::uuid authentication_uuid = boost::uuids::random_generator()();
	authentication_token = boost::uuids::to_string( authentication_uuid );
	// FIXME: we do not actually do anything with this.
	init();
	}

Server::Server(const std::string& socket)
// : return_cell_id(std::numeric_limits::max()/2)
	{
	init();
	}

Server::~Server()
	{
	}

Server::CatchOutput::CatchOutput()
	{
	}

Server::CatchOutput::CatchOutput(const CatchOutput&)
	{
	}

/// Append text written by Python (stdout/stderr redirect) to the buffer.
void Server::CatchOutput::write(const std::string& str)
	{
	// std::cerr << "Python wrote: " << str << std::endl;
	collect+=str;
	}

/// Discard the collected output.
void Server::CatchOutput::clear()
	{
	// std::cerr << "Python clear" << std::endl;
	collect="";
	}

/// Return the collected output.
std::string Server::CatchOutput::str() const
	{
	return collect;
	}

std::string Server::architecture() const
	{
	return "client-server";
	}

/// Expose CatchOutput and Server to the embedded interpreter as the
/// Python module 'cadabra2_internal'.
PYBIND11_EMBEDDED_MODULE(cadabra2_internal, m)
	{
	// auto cadabra_module = pybind11::module::import("cadabra2");
	pybind11::class_(m, "CatchOutput")
	.def("write", &Server::CatchOutput::write)
	.def("clear", &Server::CatchOutput::clear)
	;
	pybind11::class_(m, "Server")
	.def("send", &Server::send)
	.def("handles", &Server::handles)
	.def("architecture", &Server::architecture)
	.def("send_progress_update", &Server::send_progress_update);
	}

/// Rewrite a raw Python error string into a friendlier message, resolving
/// line/column positions against the original input block. Falls back to
/// returning the error unchanged on any failure.
std::string parse_error(const std::string& error, const std::string& input)
	{
	try {
		// Find syntax errors
		std::regex syntax_error(R"(SyntaxError: \('([^']+)', \('', (\d+), (\d+),.*)");
		std::smatch sm;
		if (std::regex_match(error, sm, syntax_error)) {
			std::string error_type = sm[1];
			size_t line_no = stoi(sm[2]) - 1;
			size_t col_no = stoi(sm[3]);
			return
			   "SyntaxError: " + error_type + "\n" +
			   "Line " + std::to_string(line_no) + ", column " + std::to_string(col_no) + "\n" +
			   nth_line(input, line_no - 1) + "\n" +
			   std::string(col_no > 1 ?
// --- continuation of parse_error ---
			               col_no - 2 : 0, ' ') + "^";
			}

		// Find other errors
		std::regex exception_name(R"(([a-zA-Z_][a-zA-Z0-9_]*):.*)");
		std::string first_line = nth_line(error, 0);
		if (std::regex_match(first_line, sm, exception_name)) {
			std::string name = sm[1];
			std::regex line_no(R"(\((\d+)\): )");
			std::smatch lm;
			if (std::regex_search(error, lm, line_no)) {
				size_t l = stoi(lm[1]) - 1;
				return std::regex_replace(error, line_no, "Notebook Cell (Line " + std::to_string(l) + "): " + nth_line(input, l - 1));
				}
			}
		return error;
		}
	catch (std::exception& e) {
		return error;
		}
	}

/// Initialise the embedded Python interpreter: install the stdout/stderr
/// capture objects, record the main Python thread id, and run the
/// cadabra2_defaults.py start-up script.
void Server::init()
	{
	started=false;

	main_module = pybind11::module::import("__main__");
	main_namespace = main_module.attr("__dict__");

	// Make the C++ CatchOutput class visible on the Python side.
	auto python_path = cadabra::install_prefix_of_module();

	// FIXME: since the logic above *requires* that we can find the
	// `cdb.main` module, we will already have the correct path
	// set. So appending it once more below is useless.
	// NOTE(review): the indentation inside the Python string literals below
	// may have been collapsed by the text extraction -- verify against the
	// repository original before relying on it.
	std::string stdOutErr =
	   "import sys\n"
	   "sys.path.append(r'"+python_path+"')\n"
	   "from cadabra2_internal import Server, CatchOutput\n"
	   "server=0\n"
	   "def setup_catch(cO, cE, sE):\n"
	   " global server\n"
	   " sys.stdout=cO\n"
	   " sys.stderr=cE\n"
	   " server=sE\n";
	run_string(stdOutErr, false);

	// Setup the C++ output catching objects and setup the Python side to
	// use these as stdout and stderr streams.
	pybind11::object setup_catch = main_module.attr("setup_catch");
	try {
		setup_catch(std::ref(catchOut), std::ref(catchErr), std::ref(*this));
		}
	catch(pybind11::error_already_set& ex) {
		//#ifndef ENABLE_JUPYTER
		//		snoop::log(snoop::fatal) << "Failed to initialise Python bridge." << snoop::flush;
		//#endif
		PyErr_Print();
		throw;
		}

	// Get the Python thread id.
	std::string code_get_id = "import threading; print(threading.get_native_id())";
	std::string main_thread_id_str = run_string(code_get_id);
	main_thread_id = std::stol(main_thread_id_str);
	// std::cerr << "Server: main python thread id = " << main_thread_id << std::endl;
	// std::cerr << "Server: python_path = " << python_path << std::endl;

	// Call the Cadabra default initialisation script.
	// pybind11::eval_file(python_path + "/cadabra2_defaults.py");
	// HERE: should use pybind11::eval_file instead, much simpler.
	//
	std::string startup =
	   "f=open(r'" + python_path + "/cadabra2_defaults.py'); "
	   "code=compile(f.read(), 'cadabra2_defaults.py', 'exec'); "
	   "exec(code); f.close() ";
	run_string(startup);
#ifdef DEBUG
	std::cerr << "Server::init: completed" << std::endl;
#endif
	}

/// Python trace hook (currently unused) which raises KeyboardInterrupt
/// when interrupt_block has been set.
int InterruptCheck(PyObject* obj, _frame* frame, int what, PyObject* arg)
	{
	std::cerr << "Server: interruptcheck" << std::endl;
	if(interrupt_block) {
		PyErr_SetString(PyExc_KeyboardInterrupt, "Stop script");
		interrupt_block = false;
		}
	return 0;
	}

/// Pre-process a block of Cadabra input to plain Python, execute it in the
/// main namespace, and (when handle_output) return captured stdout.
/// Throws std::runtime_error with a cleaned-up message on Python errors.
/// NOTE(review): the std::set template argument was stripped by the
/// text extraction.
std::string Server::run_string(const std::string& blk, bool handle_output, bool extract_variables, std::set remove_assignments)
	{
	//std::cerr << "RUN_STRING" << std::endl;
	//	snoop::log("run") << blk << snoop::flush;
	std::string result, newblk;

	// Run block. Catch output.
	try {
		// Preparse input block.
		// std::cerr << "RAW:\n" << blk << std::endl;
		std::string error;
		newblk = cadabra::cdb2python_string(blk, true, error);
		// std::cerr << "PREPARSED:\n" << newblk << std::endl;
		//	snoop::log("preparsed") << newblk << snoop::flush;
		run_string_variables.clear();
		if(error.size()==0) {
			// If the preparsing found an error, do not attempt anything
			// else; just run it and let Python report the error.
			// --- continuation of Server::run_string ---
			if(extract_variables) {
				cadabra::variables_in_code(newblk, run_string_variables);
				cadabra::variables_to_pull_in(newblk, run_string_variables);
				// std::cerr << "----" << std::endl;
				// for(const auto& name: run_string_variables)
				//	std::cerr << "contains " << name << std::endl;
				}
			for(const std::string& var: remove_assignments) {
				newblk = cadabra::remove_variable_assignments(newblk, var);
				}
			// std::cerr << "REMOVED:\n" << newblk << std::endl;
			}
#ifdef DEBUG
		std::cerr << "executing..." << std::endl;
		std::cerr << newblk << std::endl;
#endif
		PyErr_Clear();
		// PyEval_SetTrace(InterruptCheck, NULL);
		pybind11::exec(newblk.c_str(), main_namespace);
		// PyEval_SetTrace(NULL, NULL);
#ifdef DEBUG
		std::cerr << "exec done" << std::endl;
#endif
		// std::string object_classname = ignored.attr("__class__").attr("__name__").cast();
		// std::cerr << "" << std::endl;
		if(handle_output) {
			result = catchOut.str();
			catchOut.clear();
			std::string result_err = catchErr.str();
			if(result_err!="")
				std::cerr << "catchErr: " << result_err << std::endl;
			catchErr.clear();
			}
		}
	catch(pybind11::error_already_set& ex) {
#ifdef DEBUG
		std::cerr << "Server::run_string: exception " << ex.what() << std::endl;
#endif
		// On macOS and with the current conda tools,
		// you can never exit from this block: throwing or simply
		// exiting with 'return ""' makes things hang.
		// The solution is to ex.restore(), see
		// https://github.com/pybind/pybind11/issues/1490
		// Note: the restore() has the side effect of making the
		// error come back on any future pybind11::exec() call.
		std::string reason=parse_error(ex.what(), newblk);
		ex.restore();
		if(reason.substr(0, 17)=="KeyboardInterrupt") {
			auto loc = reason.find("At:");
			reason = "Interrupted a" + reason.substr(loc+1);
			}
		// std::cerr << "gobbling " << catchOut.str() << std::endl;
		// catchOut.clear();
		throw std::runtime_error(reason);
		}
	server_stopwatch.stop();
	return result;
	}

//void Server::on_socket_init(websocketpp::connection_hdl, boost::asio::ip::tcp::socket & /* s */)
//	{
//	boost::asio::ip::tcp::no_delay option(true);
//	// FIXME: this used to work in older websocketpp
////	s.lowest_layer().set_option(option);
//	}

Server::Connection::Connection()
	{
	uuid = boost::uuids::random_generator()();
	}

/// Register a newly-opened websocket connection.
void Server::on_open(websocket_server::id_type ws_id)
	{
	std::lock_guard lock(ws_mutex);

	Connection con;
	con.ws_id = ws_id;
	//	snoop::log(snoop::info) << "Connection " << con.uuid << " open." << snoop::flush;
	connections[ws_id]=con;
	}

/// Remove a closed websocket connection; optionally terminate the process
/// when the last client disconnects.
void Server::on_close(websocket_server::id_type ws_id)
	{
	std::lock_guard lock(ws_mutex);

	// auto it = connections.find(hdl);
	//	snoop::log(snoop::info) << "Connection " << it->second.uuid << " close." << snoop::flush;
	connections.erase(ws_id);

	if(exit_on_disconnect)
		exit(-1);
	}

/// Pending-call helper which raises the Python interrupt flag; returns -1
/// so Python does not reschedule it.
int quit(void *)
	{
	// std::cerr << "Server: setting python interrupt." << std::endl;
	PyErr_SetInterrupt();
	// std::cerr << "Server: python interrupt set."
	//           << std::endl;
	return -1;
	}

/// Bind handlers, listen on run_on_port, print the chosen port and the
/// authentication token on stdout (the client reads these), then serve.
void Server::wait_for_websocket()
	{
	try {
		wserver.set_message_handler(std::bind(&Server::on_message, this,
		                                      std::placeholders::_1, std::placeholders::_2,
		                                      std::placeholders::_3, std::placeholders::_4));
		wserver.set_connect_handler(std::bind(&Server::on_open, this, std::placeholders::_1));
		wserver.set_disconnect_handler(std::bind(&Server::on_close, this, std::placeholders::_1));

		wserver.listen(run_on_port);
		auto p = wserver.get_local_port();
		std::cout << p << std::endl;
		std::cout << authentication_token << std::endl;
		wserver.run();
		}
	catch(std::exception& ex) {
		std::cerr << "Server::wait_for_websocket: exception " << ex.what() << std::endl;
		throw;
		}
	}

/// Worker loop: wait for input blocks to appear on block_queue and run
/// them one at a time through the Python interpreter. On a Python error
/// all remaining queued blocks are discarded.
/// NOTE(review): the std::queue template arguments in this function were
/// stripped by the text extraction.
void Server::wait_for_job()
	{
	// Infinite loop, waiting for the master thread to signal that a new block is
	// available, and processing it. Blocks are always processed sequentially
	// even though new ones may come in before previous ones have finished.

	//	snoop::log(snoop::info) << "Waiting for blocks" << snoop::flush;
#ifdef DEBUG
	std::cerr << "Server::wait_for__job: start" << std::endl;
#endif
#ifndef CDB_DONT_ACQUIRE_GIL
	// FIXME: why do we need this for the normal Cadabra server, but does
	// it hang in the Jupyter server? If you drop this from the normal
	// server, it will crash soon below.
	pybind11::gil_scoped_acquire acquire;
#endif
	while(true) {
#ifdef DEBUG
		std::cerr << "Server::wait_for__job: locking" << std::endl;
#endif
		std::unique_lock lock(block_available_mutex);
		while(block_queue.size()==0) {
#ifdef DEBUG
			std::cerr << "Server::wait_for__job: waiting" << std::endl;
#endif
			block_available.wait(lock);
			}
		current_block = block_queue.front();
		block_queue.pop();
		lock.unlock();
		server_stopwatch.reset();
		server_stopwatch.start();
		try {
			// We are done with the block_queue; release the lock so that the
			// master thread can push new blocks onto it.
			//	snoop::log(snoop::info) << "Block finished running" << snoop::flush;
			server_stopwatch.stop();
			current_ws_id = current_block.ws_id;
			current_id = current_block.cell_id;
			current_block.output = run_string(current_block.input, true, true, current_block.remove_variable_assignments);
			current_block.variables = run_string_variables;
			on_block_finished(current_block);
			}
		catch(std::runtime_error& ex) {
#ifdef DEBUG
			std::cerr << "Exception caught, acquiring lock" << std::endl;
#endif
			server_stopwatch.stop();
			//	snoop::log(snoop::info) << "Python runtime exception" << snoop::flush;
			// On error we remove all other blocks from the queue.
			lock.lock();
#ifdef DEBUG
			std::cerr << "Lock acquired" << std::endl;
#endif
			std::queue empty;
			std::swap(block_queue, empty);
			lock.unlock();
			current_block.output = catchOut.str();
			catchOut.clear();
			current_block.error = ex.what();
			on_block_error(current_block);
			}
		catch(std::exception& ex) {
			server_stopwatch.stop();
			//	snoop::log(snoop::info) << "System exception" << snoop::flush;
			lock.lock();
			std::queue empty;
			std::swap(block_queue, empty);
			lock.unlock();
			current_block.output=catchOut.str();
			catchOut.clear();
			current_block.error=ex.what();
			on_kernel_fault(current_block);
			// Keep running
			}
		}
	}

/// Request that the currently-running Python block stops by setting the
/// Python interrupt flag. The commented-out code documents earlier
/// (abandoned) approaches.
void Server::stop_block()
	{
	// interrupt_block=true;
	// std::cerr << "Server: sending SIGINT to python thread." << std::endl;
	PyErr_SetInterrupt();

	// PyGILState_STATE state = PyGILState_Ensure();
	//
	// PyThreadState_SetAsyncExc ?
	//	Py_AddPendingCall(&quit, NULL);
	//	PyGILState_Release(state);

	//	PyGILState_STATE state = PyGILState_Ensure();
	//	std::cerr << "Server: make thread " << main_thread_id << " raise exception" << std::endl;
	//	PyThreadState_SetAsyncExc(main_thread_id, PyExc_Exception);
	//	PyGILState_Release(state);
	}

// Default-constructed block: ws_id==-1 marks it as not tied to a connection.

Server::Block::Block()
	: ws_id(-1), cell_id(0)
	{
	}

// Construct a block for a given connection and cell, pre-filling the
// skeleton of the JSON response message (header, content, msg_type).

Server::Block::Block(websocket_server::id_type ws_id_, const std::string& str, uint64_t id_, const std::string& msg_type_)
	: ws_id(ws_id_), msg_type(msg_type_), input(str), cell_id(id_)
	{
	nlohmann::json content, header;
	response["header"]=header;
	response["content"]=content;
	response["msg_type"]=msg_type;
	}

// Websocket message callback: drop messages from unknown connections,
// otherwise hand off to dispatch_message. Holds ws_mutex during the lookup.

void Server::on_message(websocket_server::id_type ws_id, const std::string& msg, const websocket_server::request_type& req, const std::string& ip_address)
	{
	std::lock_guard lock(ws_mutex);

	auto it = connections.find(ws_id);
	if(it==connections.end()) {
		//#ifndef ENABLE_JUPYTER
		//		snoop::log(snoop::warn) << "Message from unknown connection." << snoop::flush;
		//#endif
		return;
		}
	//	std::cout << "Message from " << it->second.uuid << std::endl;
	dispatch_message(ws_id, msg);
	}

// Parse an incoming JSON message, verify its authentication token, then act
// on its msg_type: "execute_request" (queue code for execution),
// "execute_interrupt" (stop current block, clear queue), "init" (same),
// "complete" (queue a tab-completion request) or "exit".

void Server::dispatch_message(websocket_server::id_type ws_id, const std::string& json_msg)
	{
	//	std::cout << json_msg << std::endl;

	nlohmann::json root;
	try {
		root = nlohmann::json::parse(json_msg);
		}
	catch(nlohmann::json::exception& ex) {
		//#ifndef ENABLE_JUPYTER
		//		snoop::log(snoop::error) << "Cannot parse message " << json_msg << snoop::flush;
		//#endif
		return;
		}

	// Check that this message is authenticated.
	std::string auth_token = root.value("auth_token", "");
	if(auth_token!=authentication_token) {
		std::cerr << "Received block with incorrect authentication token: " << auth_token << "."
		          << std::endl;
		return;
		}

	const auto& content = root["content"];
	const auto& header = root["header"];
	// NOTE(review): template arguments (e.g. .get<std::string>(), std::queue<Block>,
	// std::deque<uint64_t>) appear to have been stripped from this text by
	// extraction -- verify against upstream.
	std::string msg_type = header["msg_type"].get();
	//	std::cerr << "received msg_type |" << msg_type << "|" << std::endl;

	if(msg_type=="execute_request") {
		std::string code = content.value("code","");
		//	std::cerr << code << std::endl;
		uint64_t id = header.value("cell_id", uint64_t(0));
		std::unique_lock lock(block_available_mutex);
		Block block(ws_id, code, id, msg_type);
		if(content.count("remove_variable_assignments")==1) {
			block.remove_variable_assignments.insert(content.value("remove_variable_assignments", ""));
			}
		if(header.count("output_cell_ids")==1) {
			block.reuse_output_cell_ids = header.value("output_cell_ids", std::deque());
			}
		block.response["header"]["parent_origin"]="client";
		block.response["header"]["parent_id"]=id;
		block_queue.push(block);
		// Wake up the wait_for_job() loop.
		block_available.notify_one();
		}
	else if(msg_type=="execute_interrupt") {
		std::unique_lock lock(block_available_mutex);
		//	std::cerr << "Server: requesting python thread stop." << std::endl;
		stop_block();
		//	std::cerr << "Server: clearing block queue." << std::endl;
		std::queue empty;
		std::swap(block_queue, empty);
		//	std::cerr << "Server: block queue cleared." << std::endl;
		//snoop::log(snoop::warn) << "Job stop requested." << snoop::flush;
		}
	else if(msg_type=="init") {
		// Stop any running blocks.
		std::unique_lock lock(block_available_mutex);
		stop_block();
		std::queue empty;
		std::swap(block_queue, empty);
		}
	else if(msg_type=="complete") {
		// Schedule a block which runs code to complete the given string.
		std::string str=root["string"].get();
		int alternative=root["alternative"].get();
		std::string todo="print(__cdbkernel__.completer.complete(\""+str+"\", "+std::to_string(alternative)+"))";
		uint64_t id = header.value("cell_id", uint64_t(0));
		Block blk(ws_id, todo, id, "completed");
		blk.response["header"]["cell_id"]=id;
		blk.response["content"]["original"]=str;
		blk.response["content"]["position"]=root["position"].get();
		blk.response["content"]["alternative"]=alternative;
		std::unique_lock lock(block_available_mutex);
		block_queue.push(blk);
		block_available.notify_one();
		}
	else if(msg_type=="exit") {
		exit(-1);
		}
	}

// Send the result of a finished block back to the client. For "completed"
// (tab-completion) blocks this fills in the completion result; for normal
// blocks it fills in timing information and the captured output.

void Server::on_block_finished(Block block)
	{
	auto& header = block.response["header"];
	auto& content = block.response["content"];

	if(block.msg_type=="completed") {
		// FIXME: need a better way to get the result out of python, so we can spot None
		// while keeping the possibility to complete 'No' -> 'None'.
		std::string res=block.output;
		// Strip a single trailing newline produced by the Python 'print'.
		if(res.size()>0 && res[res.size()-1]=='\n')
			res=res.substr(0, res.size()-1);
		if(res=="None")
			res="";
		block.response["content"]["completed"]=res;
		}
	else {
		header["cell_origin"]="server";
		header["cell_id"]=cadabra::generate_uuid();
		header["time_total_microseconds"]=std::to_string(server_stopwatch.seconds()*1e6L + server_stopwatch.useconds());
		header["time_sympy_microseconds"]=std::to_string(sympy_stopwatch.seconds()*1e6L + sympy_stopwatch.useconds());
		header["last_in_sequence"]=block.error.empty(); // If this is the output followed by an error, it's not the last output cell for the running block.
		content["output"]=block.output;
		block.response["msg_type"]="output";
		}

	// Inform the notebook about the variables referenced in this block.
	content["variables"]=block.variables;

	std::ostringstream str;
	str << block.response << std::endl;
	send_json(str.str());
	}

// Whether the server knows how to render output cells of the given type.

bool Server::handles(const std::string& otype) const
	{
	if(otype=="latex_view" || otype=="image_png" || otype=="image_svg" || otype=="verbatim")
		return true;
	return false;
	}

uint64_t Server::send(const std::string& output, const std::string& msg_type, uint64_t parent_id, uint64_t cell_id, bool last)
	{
	// This is the function exposed to the Python side; not used
	// directly in the server to send block output back to the client
	// (that's all handled by on_block_finished above).

	//	std::cerr << "Send: " << msg_type << ", " << output.substr(0, std::min(size_t(40), output.size())) << std::endl;

	nlohmann::json json, header, content;

	uint64_t return_cell_id=cell_id;
	if(return_cell_id==0) {
		// Re-use an output cell id from the previous run of this block when the
		// frontend gave us one, so the notebook updates cells in place.
		if(current_block.reuse_output_cell_ids.size()>0) {
			//	std::cerr << "Re-using existing output cell." << std::endl;
			return_cell_id=current_block.reuse_output_cell_ids.front();
			current_block.reuse_output_cell_ids.pop_front();
			}
		else {
			return_cell_id=cadabra::generate_uuid();
			}
		}

	if(parent_id==0)
		header["parent_id"]=current_id;
	else
		header["parent_id"]=parent_id;
	header["parent_origin"]="client";
	header["cell_id"]=return_cell_id;
	header["cell_origin"]="server";
	header["time_total_microseconds"]=std::to_string(server_stopwatch.seconds()*1e6L + server_stopwatch.useconds());
	header["time_sympy_microseconds"]=std::to_string(sympy_stopwatch.seconds()*1e6L + sympy_stopwatch.useconds());
	header["last_in_sequence"]=last;
	content["output"]=output;
	json["header"]=header;
	json["content"]=content;
	json["msg_type"]=msg_type;

	std::ostringstream str;
	str << json << std::endl;
	send_json(str.str());
	return return_cell_id;
	}

// Push a "progress_update" message (free-form msg plus n-out-of-total
// counters) to the client.

void Server::send_progress_update(const std::string& msg, int n, int total)
	{
	nlohmann::json json, content, header;
	header["parent_id"] = 0;
	header["parent_origin"] = "client";
	header["cell_id"] = 0;
	header["cell_origin"] = "server";
	content["msg"] = msg;
	content["n"] = n;
	content["total"] = total;
	json["header"] = header;
	json["content"] = content;
	json["msg_type"] = "progress_update";

	std::ostringstream str;
	str << json << std::endl;
	send_json(str.str());
	}

// Low-level send of a JSON string to the current websocket connection,
// guarded by ws_mutex so both threads can use the websocket layer.

void Server::send_json(const std::string& msg)
	{
	//	std::cerr << "*** sending message " << msg << std::endl;
	std::lock_guard lock(ws_mutex);
	wserver.send(current_ws_id, msg);
	}

// Report a Python runtime error for the given block: first flush any pending
// output via on_block_finished, then send an "error" message.

void Server::on_block_error(Block blk)
	{
	if(blk.output!="")
		on_block_finished(blk);

	std::lock_guard lock(ws_mutex);

	// Make a JSON message.
	nlohmann::json json, content, header;

	auto return_cell_id=cadabra::generate_uuid();
	header["parent_id"]=blk.cell_id;
	header["parent_origin"]="client";
	header["cell_id"]=return_cell_id;
	header["cell_origin"]="server";
	header["last_in_sequence"]=true;
	content["output"]=blk.error;
	json["header"]=header;
	json["content"]=content;
	json["msg_type"]="error";

	std::ostringstream str;
	str << json << std::endl;
	//	std::cerr << "cadabra-server: sending error, " << str.str() << std::endl;
	wserver.send(blk.ws_id, str.str());
	}

// Report a kernel fault (a non-runtime_error exception) for the given block;
// same message shape as on_block_error but with msg_type "fault".

void Server::on_kernel_fault(Block blk)
	{
	if(blk.output!="")
		on_block_finished(blk);

	std::lock_guard lock(ws_mutex);

	// Make a JSON message.
	nlohmann::json json, content, header;

	auto return_cell_id=cadabra::generate_uuid();
	header["parent_id"]=blk.cell_id;
	header["parent_origin"]="client";
	header["cell_id"]=return_cell_id;
	header["cell_origin"]="server";
	header["last_in_sequence"]=true;
	content["output"]=blk.error;
	json["header"]=header;
	json["content"]=content;
	json["msg_type"]="fault";

	std::ostringstream str;
	str << json << std::endl;
	//	std::cerr << "cadabra-server: sending kernel crash report, " << str.str() << std::endl;
	wserver.send(blk.ws_id, str.str());
	}

// Main entry point: remember settings, then spawn the websocket listener on
// a separate thread and run the Python job loop on the calling thread.

void Server::run(int port, bool eod)
	{
	exit_on_disconnect = eod;
	run_on_port = port;

	// Python has to be running on the main thread, otherwise
	// it cannot receive signals.
So we spawn the websocket // listener on a separate thread, and then do the blocking // "wait for python code to execute" loop on the main // thread. // std::thread::id tmp= std::this_thread::get_id(); // main_thread_id = *(unsigned *)&tmp; // std::cerr << "Server: main_thread_id = " << main_thread_id << std::endl; runner = std::thread(std::bind(&Server::wait_for_websocket, this)); wait_for_job(); // pybind11::gil_scoped_release release; } ================================================ FILE: client_server/Server.hh ================================================ #pragma once #include #include #include #include #include #include #include #include #include "nlohmann/json.hpp" #include "websocket_server.hh" #include "Stopwatch.hh" /// \ingroup clientserver /// /// Object representing a Cadabra server, capable of receiving messages /// on a websocket, running Python code, and sending output back to the /// client. /// /// Contains the logic to intercept raw Python output but also provides /// functionality to the Python side which enables clients to send /// various objects in JSON encoded form. See Server::on_block_finished /// for the format of these messages. /// /// Every block is run on the same Python scope. They run sequentially, /// one at a time, on a thread separate from the websocket++ main loop. /// When the Python code finishes (or when it is interrupted), this thread /// locks the socket_mutex and calls on_block_finished(). class Server { public: Server(); Server(const Server&)=delete; Server(const std::string& socket); virtual ~Server(); /// The only user-visible part: just instantiate a server object and /// start it with run(). This will not return until the server has /// been shut down. Picks a random port when port==0. If /// `exit_on_disconnect==false`, keep the server alive on the /// same port and with the same authentication token when /// the connection drops (so you can reconnect). 
void run(int port=0, bool exit_on_disconnect=true);

		/// Python output catching. We implement this in a C++ class
		/// because we want to be able to catch each line of output
		/// separately, and perhaps add additional functionality to send
		/// out-of-band messages to the client later.
		class CatchOutput {
			public:
				CatchOutput();
				CatchOutput(const CatchOutput&);
				void write(const std::string& txt);
				void clear();
				std::string str() const;
			private:
				std::string collect; // accumulated output text
		};

		CatchOutput catchOut, catchErr;
		Stopwatch server_stopwatch;
		Stopwatch sympy_stopwatch;

		/// Raw code to send a string (which must be JSON formatted) as
		/// a message to the client. Handles communication of the result
		/// back to the client in JSON format. This is always of the
		/// form
		///
		///     { "header":  { "parent_id": "...",
		///                    "parent_origin": "client" | "server",
		///                    "cell_id": "...",
		///                    "cell_origin": "client" | "server"
		///                  },
		///       "content": { "output": "...",
		///                    "width": int (optional),
		///                    "height": int (optional)
		///                  },
		///       "msg_type": "..."
		///     }
		///
		/// msg_type can be "output", "latex", "image_png" and so on,
		/// corresponding to the possible values of DataCell::CellType.
		///
		/// Returns the serial number of the new cell sent.
		virtual uint64_t send(const std::string& output, const std::string& msg_type,
		                      uint64_t parent_id=0, uint64_t cell_id=0, bool last_in_sequence=false);
		void        send_progress_update(const std::string& msg, int n, int total);
		void        send_json(const std::string&);
		bool        handles(const std::string& otype) const;
		std::string architecture() const;

		/// Thread entry point for the code that waits for blocks to
		/// appear on the block queue, and executes them in turn.
		/// In practice we run this on the main thread.
		void wait_for_job();

		/// Thread entry point for code that sets up and runs the
		/// websocket listener.
		void wait_for_websocket();

	protected:
		void init();

		// WebSocket++ dependent parts below.
		void on_message(websocket_server::id_type id, const std::string& msg,
		                const websocket_server::request_type& req, const std::string& ip_address);
		void on_open(websocket_server::id_type id);
		void on_close(websocket_server::id_type id);

		websocket_server wserver;

		// Connection tracking. There can be multiple connections to
		// the server, but they all have access to the same Python
		// scope. With multiple connections, one can inspect the Python
		// stack from a different client (e.g. for debugging purposes).
		// All connections share the same authentication token.
		class Connection {
			public:
				Connection();
				websocket_server::id_type ws_id;
				boost::uuids::uuid        uuid;
		};
		// NOTE(review): template arguments appear stripped by extraction
		// throughout this header (std::map<...>, std::set<...>, std::deque<...>,
		// std::queue<...>, std::future<...>) -- restore from upstream.
		typedef std::map ConnectionMap;
		ConnectionMap connections;

		// Authentication token, needs to be sent along with any message.
		// Gets set when the server announces its port.
		std::string authentication_token;

		// Mutex to be able to use the websocket layer from both the
		// main loop and the python-running thread.
		std::mutex ws_mutex;

		// Basics for the working thread that processes blocks.
		std::thread             runner;
		std::mutex              block_available_mutex;
		std::condition_variable block_available;
		bool                    exit_on_disconnect;
		int                     run_on_port;
		unsigned long           main_thread_id;

		// Data and connection info for a single block of code.
		class Block {
			public:
				Block();
				Block(websocket_server::id_type, const std::string&, uint64_t id, const std::string& msg_type);

				websocket_server::id_type ws_id; // FIXME: decouple from websocket?
				std::string msg_type;
				std::string input;   // Python code to run
				std::string output;  // captured output
				std::string error;   // error text, empty on success
				uint64_t    cell_id;
				std::set    variables;
				std::set    remove_variable_assignments;

				// When a cell is re-run on variable change, we re-use the output cells of the
				// previous run. The IDs of these cells are sent to us by the frontend. We
				// store them here, and then pop them off the front for each call to `send`.
				std::deque reuse_output_cell_ids;

				// Response message, partially filled in when the
				// request comes in.
				nlohmann::json response;
		};

		std::queue block_queue;
		Block      current_block;

		websocket_server::id_type current_ws_id;
		uint64_t current_id; // id of the block given to us by the client.

		// Run a piece of Python code. This is called from a separate
		// thread constructed by on_message().
		std::string run_string(const std::string&, bool handle_output=true,
		                       bool extract_variables=false,
		                       std::set remove_variable_assignments=std::set());
		std::set run_string_variables;

		/// Called by the run_block() thread upon completion of the
		/// task. This will send any output generated by printing directly
		/// to stdout or stderr from Python (so, output not generated by
		/// using the 'display' function). Indicates to the client that
		/// this block has finished executing. Will send an empty string
		/// if there has been no output 'print'ed.
		virtual void on_block_finished(Block);
		virtual void on_block_error(Block);
		virtual void on_kernel_fault(Block);

		/// Halt the currently running block and prevent execution of any
		/// further blocks that may still be on the queue.
		void stop_block();

		bool        started;
		std::future job;

		/// Takes a JSON encoded message and performs the required action to process it.
		/// Where applicable these messages are compatible with IPython's message types,
		/// http://ipython.org/ipython-doc/dev/development/messaging.html
		void dispatch_message(websocket_server::id_type, const std::string& json_string);

		// Python global info.
		pybind11::scoped_interpreter guard;
		pybind11::module             main_module;
		pybind11::object             main_namespace;
};


================================================
FILE: client_server/Snoop.cc
================================================
#include "Snoop.hh"
// NOTE(review): the targets of the bare #include directives below appear to
// have been stripped during extraction -- restore from upstream.
#include
#include
#include
#include
#include
#include
#include
#ifndef _WIN32
#ifndef _WIN64
#include
#endif
#endif
#include
#ifdef _WIN32
#define timegm _mkgmtime
#endif
#include
#include
#include "nlohmann/json.hpp"
#include "Stopwatch.hh"

#ifdef ENCRYPT_PASSWORDS
#include
#endif

std::string snoop_base64_encode(unsigned char const* bytes_to_encode, unsigned int in_len);
std::string snoop_base64_decode(std::string const& encoded_string);

#include
#include
#if !defined(_WIN32) && !defined(_WIN64)
#include
#include
#else
#include
#include
#include
#include
#endif
#ifdef __APPLE__
#include
#include "TargetConditionals.h"
#else
#include
#endif

#define BOOST_SPIRIT_THREADSAFE
#include
#include
#include
#include
#if !defined(TARGET_OS_IPHONE)
#include
#include
#endif
#include
#include // generators
#include // streaming operators etc.

#define SNOOPDEBUG(ln)
//#define SNOOPDEBUG(ln) ln

using namespace snoop;

// Global instance.
snoop::Snoop snoop::log;
snoop::Flush snoop::flush;

using u64_millis = std::chrono::duration;

// Convert a milliseconds-since-epoch value into a chrono time_point.
static std::chrono::time_point u64_to_time(uint64_t timestamp)
	{
	return std::chrono::time_point{u64_millis{timestamp}};
	}

// Until we have widespread C++20 support, we will need to get
// the timezone offset using old-style code. This code returns
// the offset in minutes the same as Javascript's
// Date.getTimezoneOffset(); so if you are in zone GMT+2,
// it returns -120.
int local_utc_offset_minutes()
	{
	time_t t = time ( NULL );
	struct tm * locg = localtime ( &t );
	struct tm locl;
	memcpy ( &locl, locg, sizeof ( struct tm ) );
	// Difference between interpreting local broken-down time as UTC and as
	// local time gives the UTC offset; negate and convert to minutes.
	return -1 * (int)( timegm ( locg ) - mktime ( &locl ) ) / 60;
	}

// Wrap a possibly-NULL C string (as returned by sqlite3_column_text) in a
// std::string, mapping NULL to the empty string.
std::string safestring(const unsigned char *c)
	{
	if(c==0) return "";
	else return std::string((const char *)c);
	}

Snoop::Snoop()
	: sync_immediately_(false)
	, db(0)
	, payload_db(0)
	, auth_db(0)
	, insert_statement(0)
	, id_for_uuid_statement(0)
	, payload_insert_statement(0)
	, testq_statement(0)
	, connection_is_open(false)
	, connection_attempt_failed(false)
	{
	}

Snoop::ParseError::ParseError(const std::string& m)
	: std::logic_error(m)
	{
	}

// Open (or create) the main, payload and auth sqlite databases, record
// information about the current app/run, and -- unless we are the server
// itself -- register a run uuid and start the websocket sync client.
// `machine_id` overrides the auto-detected machine identification.

void Snoop::init(const std::string& app_name, const std::string& app_version, std::string server, std::string dbname, std::string machine_id)
	{
	assert(app_name.size()>0);

	if(db==0) { // Only initialise if database has not been opened before
		this_app_.app_name=app_name;
		this_app_.app_version=app_version;
		this_app_.pid = getpid();
#if defined(_WIN32) || defined(_WIN64)
		DWORD dwVersion = 0;
		DWORD dwMajorVersion = 0;
		DWORD dwMinorVersion = 0;
		DWORD dwBuild = 0;
		dwVersion = GetVersion();
		// Get the Windows version.
		dwMajorVersion = (DWORD)(LOBYTE(LOWORD(dwVersion)));
		dwMinorVersion = (DWORD)(HIBYTE(LOWORD(dwVersion)));
		// Get the build number.
		if (dwVersion < 0x80000000) dwBuild = (DWORD)(HIWORD(dwVersion));
		this_app_.machine_id = "Windows "+std::to_string(dwMajorVersion)+"."+std::to_string(dwMinorVersion);
#else
		struct utsname buf;
		if(uname(&buf)==0) {
			this_app_.machine_id = std::string(buf.sysname)
				+", "+buf.nodename+", "+buf.release+", "+buf.version+", "+buf.machine;
#ifdef __linux__
			this_app_.machine_id += std::string(", ")+buf.domainname;
#endif
			}
#endif
		if(machine_id!="")
			this_app_.machine_id = machine_id; // override (used in Objective-C backend).
		this_app_.user_id = get_user_uuid(app_name);
		auto duration = std::chrono::system_clock::now().time_since_epoch();
		// NOTE(review): template arguments (e.g. duration_cast<...>, std::set<...>,
		// std::lock_guard<...>) appear stripped from this text by extraction --
		// verify against upstream.
		this_app_.create_millis = std::chrono::duration_cast(duration).count();
		this_app_.create_timezone = local_utc_offset_minutes();

		// Default to secure websockets unless a scheme was given explicitly.
		if(server.find("ws://")==std::string::npos && server.find("wss://")==std::string::npos)
			server_="wss://"+server;
		else
			server_=server;

		std::string payload_dbname, auth_dbname;
		if(dbname.size()==0) {
#if defined(_WIN32) || defined(_WIN64)
			// On Windoze, we store in 'user_data_dir' provided by Glib.
			std::string logdir = Glib::get_user_data_dir();
			mkdir(logdir.c_str());
#elif defined(TARGET_OS_IPHONE)
			// On iOS, we store in '~/Library/.log/'.
			std::string homedir=getenv("HOME");
			homedir+="/Library";
			std::string logdir = homedir+std::string("/.log");
			mkdir(logdir.c_str(), 0700);
#else
			// On Unix, we store in '~/.log/'.
			struct passwd *pw = getpwuid(getuid());
			const char *homedir = pw->pw_dir;
			std::string logdir = homedir+std::string("/.log");
			mkdir(logdir.c_str(), 0700);
#endif
			SNOOPDEBUG( std::cerr << "Snoop::init: storing databases in " << logdir << std::endl; )
			dbname=logdir+"/"+app_name+".db";
			payload_dbname=logdir+"/"+app_name+"_payload.db";
			auth_dbname=logdir+"/"+app_name+"_auth.db";
			}
		else {
			payload_dbname=dbname+"_payload.db";
			auth_dbname=dbname+"_auth.db";
			dbname+=".db";
			}

		// Open the main database.
		int ret = sqlite3_open_v2(dbname.c_str(), &db, SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, NULL);
		if(ret)
			throw std::logic_error("Snoop::init: Cannot open main snoop database");
		SNOOPDEBUG( std::cerr << "Snoop::init: main snoop database '" << dbname << "' open" << std::endl; );
		sqlite3_exec(db, "PRAGMA synchronous = NORMAL", NULL, NULL, NULL);
		sqlite3_exec(db, "PRAGMA journal_mode = WAL", NULL, NULL, NULL);
		sqlite3_exec(db, "PRAGMA busy_timeout = 5000", NULL, NULL, NULL);

		// Open the payload database.
		ret = sqlite3_open_v2(payload_dbname.c_str(), &payload_db, SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, NULL);
		if(ret)
			throw std::logic_error("Snoop::init: Cannot open payload database");
		SNOOPDEBUG( std::cerr << "Snoop::init: payload database open" << std::endl; );
		sqlite3_exec(payload_db, "PRAGMA synchronous = NORMAL", NULL, NULL, NULL);
		sqlite3_exec(payload_db, "PRAGMA journal_mode = WAL", NULL, NULL, NULL);
		sqlite3_exec(payload_db, "PRAGMA busy_timeout = 5000", NULL, NULL, NULL);
		SNOOPDEBUG( std::cerr << "Snoop::init: payload database configured" << std::endl; );

		create_tables();
		SNOOPDEBUG( std::cerr << "Snoop::init: logging and payload tables created" << std::endl; );

		// Open the auth database.
		ret = sqlite3_open_v2(auth_dbname.c_str(), &auth_db, SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, NULL);
		if(ret)
			throw std::logic_error("Snoop::init: Cannot open authentication database");
		SNOOPDEBUG( std::cerr << "Snoop::init: auth database open" << std::endl; );
		sqlite3_exec(auth_db, "PRAGMA synchronous = NORMAL", NULL, NULL, NULL);
		sqlite3_exec(auth_db, "PRAGMA journal_mode = WAL", NULL, NULL, NULL);
		sqlite3_exec(auth_db, "PRAGMA busy_timeout = 5000", NULL, NULL, NULL);
		SNOOPDEBUG( std::cerr << "Snoop::init: auth database configured" << std::endl; );
		create_authentication_tables();
		SNOOPDEBUG( std::cerr << "Snoop::init: auth database tables created" << std::endl; );

		// Turn off synchronous writes as they seriously degrade performance (by orders of magnitude)
		// for our single-row inserts. See
		// http://stackoverflow.com/questions/1711631/improve-insert-per-second-performance-of-sqlite?rq=1
		// for more options to speed things up.
		// The busy_timeout is to be compatible with litestream.
		//	sqlite3_exec(db, "PRAGMA synchronous = OFF", NULL, NULL, NULL);

		// If this is a client, i.e. not a SnoopServer: obtain a uuid, start the websocket listener,
		// and sync with the remote server whatever has not yet been synced in previous runs.
		if(this_app_.app_name!="SnoopServer") {
			obtain_uuid();
			start_websocket_client();
			// Once started, the websocket client thread will call a sync
			// in the on_client_open callback, and then set the
			// connection_is_open flag.
			}
		}
	}

// Return a stable per-user uuid, read from (or, on first run, generated and
// written to) the per-app snoop configuration file.

std::string Snoop::get_user_uuid(const std::string& appname)
	{
	std::string user_uuid="";

#ifdef TARGET_OS_IPHONE
	// On iOS, config files go in '~/Library/.config/'
	std::string configdir=getenv("HOME");
	configdir+="/Library/.config";
#else
	// on Unix/Windoze, Glib knows where to store config data.
	std::string configdir = Glib::get_user_config_dir();
#endif
	std::string configpath=configdir + std::string("/snoop/"+appname+".conf");
	std::ifstream config(configpath);
	bool need_to_write=true;
#ifndef TARGET_OS_IPHONE
	if(config) {
		std::set options;
		options.insert("user");
		for(boost::program_options::detail::config_file_iterator i(config, options), e ; i != e; ++i) {
			// FIXME: http://stackoverflow.com/questions/24701547/how-to-parse-boolean-option-in-config-file
			if(i->string_key=="user") {
				user_uuid=i->value[0];
				need_to_write=false;
				}
			}
		}
#endif
	if(need_to_write) {
		// First time run; create config subdirectory for snoop.
		std::string configsubdir = configdir+std::string("/snoop");
#if defined(_WIN32) || defined(_WIN64)
		mkdir(configdir.c_str());
		mkdir(configsubdir.c_str());
#else
		mkdir(configdir.c_str(), 0700);
		mkdir(configsubdir.c_str(), 0700);
#endif
		std::ofstream config(configpath);
		if(config) {
			auto tmp = boost::uuids::random_generator()();
			std::ostringstream str;
			str << tmp;
			user_uuid = str.str();
			config << "user = " << user_uuid << std::endl;
			}
		else {
			SNOOPDEBUG( std::cerr << "Snoop: cannot write " << configpath << std::endl; )
			}
		}

	return user_uuid;
	}

void Snoop::set_sync_immediately(bool s)
	{
	sync_immediately_=s;
	}

// Create (and where needed migrate) the `runs`, `logs` and `payload` tables
// plus their indexes. Holds sqlite_mutex throughout.

void Snoop::create_tables()
	{
	assert(db!=0);
	assert(payload_db!=0);
	std::lock_guard lock(sqlite_mutex);
	char *errmsg;

	// Create the `runs` table.
	if(sqlite3_exec(db, "create table if not exists runs ("
	                "id integer primary key autoincrement,"
	                "uuid text,"
	                "create_millis unsigned big int,"
	                "receive_millis unsigned big int,"
	                "pid int,"
	                "ip_address text,"
	                "machine_id text,"
	                "app_name text,"
	                "app_version text,"
	                "user_id text,"
	                "server_status int,"
	                "create_timezone int default -1);"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create table runs");
		}

	// Add the create_timezone column, ignore any errors.
	// (The alter fails harmlessly when the column already exists.)
	if(sqlite3_exec(db, "alter table runs add column create_timezone int default -1;"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		}

	// Create the `logs` table.
	if(sqlite3_exec(db, "create table if not exists logs ("
	                "log_id integer primary key autoincrement,"
	                "client_log_id integer,"
	                "id integer references login,"
	                "create_millis unsigned big int,"
	                "receive_millis unsigned big int,"
	                "loc_file text,"
	                "loc_line integer,"
	                "loc_method text,"
	                "type text,"
	                "message text,"
	                "server_status int,"
	                "session_uuid text,"
	                "create_timezone int default -1);"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create table logs");
		}

	// Add new session_uuid to existing table if we do not have it already.
	if(sqlite3_exec(db, "alter table logs add column session_uuid text;"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		}

	// Add the create_timezone column, ignore any errors.
	if(sqlite3_exec(db, "alter table logs add column create_timezone int default -1;"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		}

	// Create the `payload` table.
	if(sqlite3_exec(payload_db, "create table if not exists payload ("
	                "payload_id integer primary key autoincrement,"
	                "client_payload_id integer,"
	                "id integer," /* 'references login', but that's not possible across databases */
	                "create_millis unsigned big int,"
	                "receive_millis unsigned big int,"
	                "payload text,"
	                "server_status int,"
	                "create_timezone int default -1);"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create table payload");
		}

	// Add the create_timezone column, ignore any errors.
	if(sqlite3_exec(payload_db, "alter table payload add column create_timezone int default -1;"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		}

	// Indexes for the query patterns used elsewhere in this file.
	if(sqlite3_exec(db, "create index if not exists logs_id_idx on logs(id);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on logs.id: "+err);
		}
	if(sqlite3_exec(db, "create index if not exists logs_client_log_id_idx on logs(client_log_id);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on logs.client_log_id: "+err);
		}
	if(sqlite3_exec(db, "create index if not exists logs_create_millis_idx on logs(create_millis);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on logs.create_millis: "+err);
		}
	if(sqlite3_exec(db, "create index if not exists logs_type_idx on logs(type);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on logs.type: "+err);
		}
	if(sqlite3_exec(db, "create index if not exists runs_uuid_idx on runs(uuid);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on runs.uuid: "+err);
		}
	if(sqlite3_exec(db, "create index if not exists runs_machine_id_idx on runs(machine_id);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on runs.machine_id: "+err);
		}
	if(sqlite3_exec(db, "create index if not exists runs_app_version_idx on runs(app_version);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on runs.app_version: "+err);
		}
	if(sqlite3_exec(db, "create index if not exists runs_app_name_idx on runs(app_name);", NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error("Failed to create index on runs.app_name: "+err);
		}
	}

void Snoop::create_authentication_tables()
	{
	// We need two tables: one for the username/salted-password
	// combo (together with some other user info, probably)
	// and one for authorisation tickets issued after a successful
	// login.
	assert(auth_db!=0);
	std::lock_guard lock(sqlite_mutex);
	char *errmsg;

	if(sqlite3_exec(auth_db, "create table if not exists users ("
	                "id integer primary key autoincrement,"
	                "username text unique,"
	                "password text,"
	                "enabled int,"
	                "\"group\" int,"
	                "email text,"
	                "device text);"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err("Failed to create table users: ");
		err += std::string(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error(err);
		}

	if(sqlite3_exec(auth_db, "create table if not exists groups ("
	                "id integer primary key autoincrement,"
	                "\"group\" text unique,"
	                "description text,"
	                "enabled int);"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err("Failed to create table groups: ");
		err += std::string(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error(err);
		}

	// The following table is fairly trivial right now but can be used
	// to add other information at a later stage. Mainly introduced
	// so we can handle multiple user_detail records for each user.
	//	if(sqlite3_exec(auth_db, "create table if not exists user_details ("
	//	                "id integer primary key autoincrement,"
	//	                "user_id integer,"
	//	                "device text);"
	//	                , NULL, NULL, &errmsg) != SQLITE_OK) {
	//		sqlite3_free(errmsg);
	//		throw std::logic_error("Failed to create table user_details");
	//		}

	if(sqlite3_exec(auth_db, "create table if not exists tickets ("
	                "id integer primary key autoincrement,"
	                "user_id integer,"
	                "ticket_uuid text,"
	                "valid integer);"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err("Failed to create table tickets: ");
		err += std::string(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error(err);
		}

	if(sqlite3_exec(auth_db, "create table if not exists auth_attempts ("
	                "id integer primary key autoincrement,"
	                "time_millis unsigned big int,"
	                "user_id integer,"
	                "ticket_id integer,"
	                "success integer,"
	                "msg text);"
	                , NULL, NULL, &errmsg) != SQLITE_OK) {
		std::string err("Failed to create table auth_attempts: ");
		err += std::string(errmsg);
		sqlite3_free(errmsg);
		throw std::logic_error(err);
		}
	}

// Look up the run uuid for the current pid in the `runs` table; when there
// is none, generate a fresh uuid and store a new run entry.

void Snoop::obtain_uuid()
	{
	assert(db!=0);
	std::lock_guard lock(sqlite_mutex);

	sqlite3_stmt *statement=0;
	std::ostringstream ss;
	//	ss << "select uuid from runs where pid=" << getpid() << " order by create_millis desc limit 1";
	ss << "select uuid from runs where pid=" << getpid() << " order by create_millis desc limit 1";
	int res = sqlite3_prepare(db, ss.str().c_str(), -1, &statement, NULL);
	if(res==SQLITE_OK) {
		int ret = sqlite3_step(statement);
		if(ret==SQLITE_ROW) {
			if(sqlite3_column_type(statement, 0)==SQLITE3_TEXT)
				this_app_.uuid=safestring(sqlite3_column_text(statement, 0));
			else throw std::logic_error("Database inconsistency for obtain_uuid");
			}
		}
	sqlite3_finalize(statement);

	// Generate and insert a new uuid if there is no existing entry for the current pid.
	if(this_app_.uuid.size()==0) {
		auto tmp = boost::uuids::random_generator()();
		std::ostringstream str;
		str << tmp;
		this_app_.uuid = str.str();
		SNOOPDEBUG( std::cerr << "Snoop: registering run with uuid " << this_app_.uuid << std::endl; );
		bool stored = store_app_entry_without_lock(this_app_);
		if(!stored) {
			SNOOPDEBUG( std::cerr << "Snoop: uuid already registered" << std::endl; );
			}
		else {
			SNOOPDEBUG( std::cerr << "Snoop: app run for this uuid stored" << std::endl; );
			}
		}
	else {
		SNOOPDEBUG( std::cerr << "Snoop: pid already has a uuid, not re-registering" << std::endl; );
		}
	}

/// Get the app_version string for the last run on the given device.
std::string Snoop::last_seen_version(std::string machine_id)
	{
	assert(db!=0);
	std::lock_guard lock(sqlite_mutex);

	sqlite3_stmt *statement=0;
	std::ostringstream ss;
	ss << "select app_version from runs where machine_id=? order by id desc limit 1";
	std::string last_version="";
	int res = sqlite3_prepare(db, ss.str().c_str(), -1, &statement, NULL);
	if(res==SQLITE_OK) {
		sqlite3_bind_text(statement, 1, machine_id.c_str(), machine_id.size(), 0);
		int ret = sqlite3_step(statement);
		if(ret==SQLITE_ROW) {
			if(sqlite3_column_type(statement, 0)==SQLITE3_TEXT)
				last_version=safestring(sqlite3_column_text(statement, 0));
			}
		}
	sqlite3_finalize(statement);

	return last_version;
	}

// NOTE(review): this function continues beyond the end of this chunk;
// the visible part is documented, the remainder is not shown here.
bool Snoop::authenticate(std::function f, std::string user, std::string pass, bool always_reauth)
	{
	// Wait for the websocket client thread to have spun up.
	std::unique_lock lock(connection_mutex);
	if(connection_is_open==false && connection_attempt_failed==false) {
		connection_cv.wait(lock);
		}

	authentication_callback=f;

	// If we have a ticket, setup check request and return true;
	// If we do not have a ticket, and user and pass are not empty, setup login check, return false.

	// First check if we have a ticket from a previous session.
	std::string ticket_uuid;
	if(always_reauth==false)
		ticket_uuid=get_local_ticket();

	// Check ticket validity with server, or do password login.
if(ticket_uuid.size()!=0 && user=="" && pass=="") { // have ticket and want to login without name/pass SNOOPDEBUG( std::cerr << "Have a ticket already, re-validating." << std::endl; ); std::ostringstream pack; pack << "{ \"authenticate\": {" << " \"ticket_uuid\": \"" << ticket_uuid << "\"} } \n"; if(!connection_is_open) { SNOOPDEBUG( std::cerr << "Connection not open, cannot verify ticket, allowed through." << std::endl; ); } else { wsclient.send(pack.str()); } return true; } else { ticket_uuid=""; // FIXME: should we also remove it from the local database? // Store the user in the local database. SNOOPDEBUG( std::cerr << "Snoop::authenticate: storing user '" << user << "' (but not password) in local database." << std::endl; ); if(!add_user(user, "", true)) { SNOOPDEBUG( std::cerr << "Snoop::authenticate: error storing user." << std::endl; ); } // Test: std::string local_user_vrfy = local_user(); SNOOPDEBUG( std::cerr << "Snoop::authenticate: local_user = " << local_user_vrfy << std::endl; ); // Send authentication request to server, asking for ticket. Then exit, as // the ticket will come back async. SNOOPDEBUG( std::cerr << "No ticket yet, requesting one with login/pass." << std::endl; ); std::ostringstream pack; pack << "{ \"authenticate\": {" << " \"user\": \"" << user << "\", \"password\": \"" << pass << "\" } } \n"; if(!connection_is_open) { SNOOPDEBUG( std::cerr << "No connection, cannot verify login credentials" << std::endl; ); // FIXME: need a 'false' which also shows that we have not been able to verify. } else { wsclient.send(pack.str()); } return false; } } void Snoop::set_error_handler(std::function f) { error_callback=f; } void Snoop::set_session_uuid(const std::string& s) { session_uuid_=s; } std::string Snoop::get_local_ticket() { std::lock_guard lock(sqlite_mutex); // Prepare the query for the ticket_uuid. This always // queries for the ticket with user number 0 (as the local // client storage does not store user details). 
sqlite3_stmt *statement=0; std::ostringstream ss; ss << "select ticket_uuid from tickets where user_id=0 and valid=1"; int res = sqlite3_prepare(auth_db, ss.str().c_str(), -1, &statement, NULL); if(res!=SQLITE_OK) return ""; int ret = sqlite3_step(statement); std::string ticket_uuid; if(ret==SQLITE_ROW) { ticket_uuid=safestring(sqlite3_column_text(statement, 0)); } sqlite3_finalize(statement); return ticket_uuid; } void Snoop::set_local_ticket(std::string ticket_uuid) { // First delete local ticket. std::ostringstream ss; sqlite3_stmt *statement=0; ss << "delete from tickets"; int res = sqlite3_prepare(auth_db, ss.str().c_str(), -1, &statement, NULL); if(res!=SQLITE_OK) throw std::logic_error("Snoop::delete_local_ticket: sqlite3_prepare failed error "+std::to_string(res)); res = sqlite3_step(statement); if(res!=SQLITE_DONE) throw std::logic_error("Snoop::store_ticket: sqlite3_step failed error "+std::to_string(res)); sqlite3_finalize(statement); // Now store. if(ticket_uuid.size()!=0) store_ticket(ticket_uuid, 0, 1); } Snoop::Ticket::Ticket() { ticket_id=-1; user_id=-1; valid=false; } Snoop::Ticket Snoop::is_ticket_valid(std::string ticket_uuid) { std::lock_guard lock(sqlite_mutex); // Prepare the query for the ticket_uuid. Ticket tret; tret.ticket_uuid=ticket_uuid; tret.valid = false; sqlite3_stmt *statement=0; std::ostringstream ss; ss << "select valid, users.enabled, ifnull(groups.enabled,1) as groupsenabled, tickets.id, users.id from tickets join users on tickets.user_id=users.id left join groups on users.\"group\"=groups.id where ticket_uuid=?"; int res = sqlite3_prepare(auth_db, ss.str().c_str(), -1, &statement, NULL); if(res != SQLITE_OK) return tret; sqlite3_bind_text(statement, 1, ticket_uuid.c_str(), ticket_uuid.size(), 0); // Query database. 
int valid=0; int enabled=0; int groupsenabled=0; int ret = sqlite3_step(statement); if(ret==SQLITE_ROW) { valid=sqlite3_column_int(statement, 0); enabled=sqlite3_column_int(statement, 1); groupsenabled=sqlite3_column_int(statement, 2); tret.ticket_id=sqlite3_column_int(statement, 3); tret.user_id=sqlite3_column_int(statement, 4); } sqlite3_finalize(statement); tret.valid = (valid==1 && enabled==1 && groupsenabled==1); return tret; } int Snoop::store_ticket(std::string ticket_uuid, int user_id, bool valid) { assert(auth_db!=0); std::ostringstream ss; sqlite3_stmt *statement=0; ss << "insert into tickets (user_id, ticket_uuid, valid) values (?, ?, ?)"; int res = sqlite3_prepare(auth_db, ss.str().c_str(), -1, &statement, NULL); if(res!=SQLITE_OK) throw std::logic_error("Snoop::store_ticket: sqlite3_prepare failed error "+std::to_string(res)); sqlite3_bind_int(statement, 1, user_id); sqlite3_bind_text(statement, 2, ticket_uuid.c_str(), ticket_uuid.size(), 0); sqlite3_bind_int(statement, 3, valid?1:0); res = sqlite3_step(statement); if(res!=SQLITE_DONE) throw std::logic_error("Snoop::store_ticket: sqlite3_step failed error "+std::to_string(res)); sqlite3_finalize(statement); return sqlite3_last_insert_rowid(auth_db); } bool Snoop::store_app_entry(Snoop::AppEntry& app) { assert(db!=0); std::lock_guard lock(sqlite_mutex); return store_app_entry_without_lock(app); } bool Snoop::store_app_entry_without_lock(Snoop::AppEntry& app) { // Do we already have a record with this uuid? sqlite3_stmt *testq=0; int testq_res = sqlite3_prepare(db, "select count(*) from runs where uuid=?", -1, &testq, NULL); if(testq_res!=SQLITE_OK) throw std::logic_error("Snoop::store_app_entry_without_lock: failed to test for row presence"); sqlite3_bind_text(testq, 1, app.uuid.c_str(), app.uuid.size(), 0); sqlite3_step(testq); int64_t num = sqlite3_column_int64(testq, 0); sqlite3_finalize(testq); if(num>0) return false; // No entry yet, we need to store it. 
sqlite3_stmt *statement=0; int res = sqlite3_prepare(db, "insert into runs (uuid, create_millis, receive_millis, pid, ip_address, machine_id, " "app_name, app_version, user_id, server_status, create_timezone) " "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", -1, &statement, NULL); if(res==SQLITE_OK) { sqlite3_bind_text(statement, 1, app.uuid.c_str(), app.uuid.size(), 0); sqlite3_bind_int64(statement, 2, app.create_millis); sqlite3_bind_int64(statement, 3, app.receive_millis); sqlite3_bind_int64(statement, 4, app.pid); sqlite3_bind_text(statement, 5, app.ip_address.c_str(), app.ip_address.size(), 0); sqlite3_bind_text(statement, 6, app.machine_id.c_str(), app.machine_id.size(), 0); sqlite3_bind_text(statement, 7, app.app_name.c_str(), app.app_name.size(), 0); sqlite3_bind_text(statement, 8, app.app_version.c_str(), app.app_version.size(), 0); sqlite3_bind_text(statement, 9, app.user_id.c_str(), app.user_id.size(), 0); sqlite3_bind_int(statement, 10, app.server_status); // sqlite3_bind_text(statement, 11, app.uuid.c_str(), app.uuid.size(), 0); sqlite3_bind_int(statement, 11, app.create_timezone); sqlite3_step(statement); sqlite3_finalize(statement); app.id = sqlite3_last_insert_rowid(db); return true; } else { throw std::logic_error("Failed to prepare insertion"); } } bool Snoop::store_log_entry(Snoop::LogEntry& log_entry, bool avoid_server_duplicates) { assert(db!=0); std::lock_guard lock(sqlite_mutex); if(false && avoid_server_duplicates) { // This can take 0.3s on a 15Gb database. We cannot afford that. // Do we already have a record with this client_log_id and id? if(testq_statement==0) { int testq_res = sqlite3_prepare(db, "select exists (select 1 from (select client_log_id from logs where id=?) 
where client_log_id=?)", -1, &testq_statement, NULL); if(testq_res!=SQLITE_OK) throw std::logic_error("Snoop::store_log_entry_without_lock: failed to prepare query for duplicate check"); } std::cerr << log_entry.id << ", " << log_entry.client_log_id << std::endl; sqlite3_bind_int64(testq_statement, 1, log_entry.id); sqlite3_bind_int64(testq_statement, 2, log_entry.client_log_id); sqlite3_step(testq_statement); int64_t num = sqlite3_column_int64(testq_statement, 0); sqlite3_reset(testq_statement); if(num>0) return false; } // Need to store this entry. auto duration = std::chrono::system_clock::now().time_since_epoch(); log_entry.receive_millis = std::chrono::duration_cast(duration).count(); int res=SQLITE_OK; if(insert_statement==0) { res=sqlite3_prepare_v2(db, "insert into logs " "(client_log_id, id, create_millis, receive_millis, loc_file, loc_line, loc_method, " " type, message, server_status, session_uuid, create_timezone) " "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", -1, &insert_statement, NULL); } if(res==SQLITE_OK ) { sqlite3_bind_int(insert_statement, 1, log_entry.client_log_id); sqlite3_bind_int(insert_statement, 2, log_entry.id); sqlite3_bind_int64(insert_statement, 3, log_entry.create_millis); sqlite3_bind_int64(insert_statement, 4, log_entry.receive_millis); sqlite3_bind_text(insert_statement, 5, log_entry.loc_file.c_str(), log_entry.loc_file.size(), 0); sqlite3_bind_int(insert_statement, 6, log_entry.loc_line); sqlite3_bind_text(insert_statement, 7, log_entry.loc_method.c_str(), log_entry.loc_method.size(), 0); sqlite3_bind_text(insert_statement, 8, log_entry.type.c_str(), log_entry.type.size(), 0); sqlite3_bind_text(insert_statement, 9, log_entry.message.c_str(), log_entry.message.size(), 0); sqlite3_bind_int(insert_statement, 10, log_entry.server_status); sqlite3_bind_text(insert_statement, 11, log_entry.session_uuid.c_str(), log_entry.session_uuid.size(), 0); sqlite3_bind_int(insert_statement, 12, log_entry.create_timezone); 
sqlite3_step(insert_statement); log_entry.log_id = sqlite3_last_insert_rowid(db); sqlite3_reset(insert_statement); return true; } else { throw std::logic_error("Failed to insert log entry"); } } bool Snoop::store_auth_attempt_entry(int user_id, int ticket_id, int valid, std::string msg) { std::lock_guard lock(sqlite_mutex); sqlite3_stmt *statement=0; int res = sqlite3_prepare(auth_db, "insert into auth_attempts (time_millis, user_id, ticket_id, success, msg) " "values (?, ?, ?, ?, ?)", -1, &statement, NULL); auto duration = std::chrono::system_clock::now().time_since_epoch(); uint64_t time_millis = std::chrono::duration_cast(duration).count(); if(res==SQLITE_OK) { sqlite3_bind_int64(statement, 1, time_millis); sqlite3_bind_int(statement, 2, user_id); sqlite3_bind_int(statement, 3, ticket_id); sqlite3_bind_int(statement, 4, valid); sqlite3_bind_text(statement, 5, msg.c_str(), msg.size(), 0); sqlite3_step(statement); sqlite3_finalize(statement); return true; } else { throw std::logic_error("Failed to prepare insertion for auth_attempts"); } } bool Snoop::store_payload_entry(Snoop::PayLoad& payload) { assert(payload_db!=0); std::lock_guard lock(sqlite_mutex); // Payload can be large, so we always first check if we have this entry already. sqlite3_stmt *testq=0; int testq_res = sqlite3_prepare(payload_db, "select count(*) from payload where client_payload_id=? and id=? and client_payload_id!=-1", -1, &testq, NULL); if(testq_res!=SQLITE_OK) throw std::logic_error("Snoop::store_payload_entry_without_lock: failed to test for row presence"); sqlite3_bind_int64(testq, 1, payload.client_payload_id); sqlite3_bind_int64(testq, 2, payload.id); sqlite3_step(testq); int64_t num = sqlite3_column_int64(testq, 0); sqlite3_finalize(testq); if(num>0) { SNOOPDEBUG( std::cerr << "Already have payload with client_payload_id=" << payload.client_payload_id << " and id=" << payload.id << std::endl; ); return false; } // Need to store this entry. 
SNOOPDEBUG( std::cerr << "Storing payload entry" << std::endl; ); int res=SQLITE_OK; if(payload_insert_statement==0) { res=sqlite3_prepare_v2(payload_db, "insert into payload " "(client_payload_id, id, create_millis, receive_millis, payload, server_status, create_timezone) " "values (?, ?, ?, ?, ?, ?, ?)", -1, &payload_insert_statement, NULL); } if(res==SQLITE_OK ) { sqlite3_bind_int(payload_insert_statement, 1, payload.client_payload_id); sqlite3_bind_int(payload_insert_statement, 2, payload.id); sqlite3_bind_int64(payload_insert_statement, 3, payload.create_millis); sqlite3_bind_int64(payload_insert_statement, 4, payload.receive_millis); sqlite3_bind_text(payload_insert_statement, 5, payload.payload.c_str(), payload.payload.size(), 0); sqlite3_bind_int(payload_insert_statement, 6, payload.server_status); sqlite3_bind_int(payload_insert_statement, 7, payload.create_timezone); sqlite3_step(payload_insert_statement); payload.payload_id = sqlite3_last_insert_rowid(db); sqlite3_reset(payload_insert_statement); return true; } else { throw std::logic_error("Failed to insert payload entry"); } } void Snoop::start_websocket_client() { SNOOPDEBUG( std::cerr << "Snoop: attempting open" << std::endl; ); { std::unique_lock lock(connection_mutex); connection_attempt_failed=false; //std::cerr << "Snoop: attempting open" << std::endl; } SNOOPDEBUG( std::cerr << "Snoop: got connection_mutex" << std::endl; ); // wsclient.start_perpetual(); // Start I/O thread. All actual connection work runs there, // and is also re-started there on connection failure/loss. wsclient_thread=std::thread([this]{ io_thread_run(); SNOOPDEBUG( std::cerr << "Snoop: client stopped running, thread will end" << std::endl; ); }); } void Snoop::try_connect() { // This code runs in the separate thread which handles I/O. 
wsclient.set_connect_handler(std::bind(&Snoop::on_client_open, this)); wsclient.set_fail_handler(std::bind(&Snoop::on_client_fail, this, std::placeholders::_1)); wsclient.set_close_handler(std::bind(&Snoop::on_client_close, this)); wsclient.set_message_handler(std::bind(&Snoop::on_client_message, this, std::placeholders::_1)); std::string uri = server_; SNOOPDEBUG( std::cerr << "Snoop: uri = " << uri << std::endl; ); // Attempt the connection. Failure is reported on the `on_fail` handler. wsclient.connect(uri); // FIXME: we need to retry the connection, and handle when the connection // is down!!! } void Snoop::io_thread_run() { // This code runs in the separate thread which handles I/O. SNOOPDEBUG( std::cerr << "Snoop: I/O thread starting." << std::endl; ); try_connect(); wsclient.run(); SNOOPDEBUG( std::cerr << "Snoop: I/O thread up and running." << std::endl; ); } void Snoop::sync_with_server(bool from_wsthread) { assert(server_.size()>0); if(!from_wsthread) if(!connection_is_open) return; sync_runs_with_server(from_wsthread); sync_logs_with_server(from_wsthread); sync_payloads_with_server(from_wsthread); } void Snoop::sync_runs_with_server(bool from_wsthread) { if(!from_wsthread) if(!connection_is_open) return; std::lock_guard lock(sqlite_mutex); // Create a JSON text with all locally stored entries which have // a server_status field negative or zero. 
sqlite3_stmt *statement=0; std::ostringstream ssc; std::ostringstream pack; pack << "{ \"run\": ["; ssc << "select id, uuid, create_millis, receive_millis, pid, ip_address, machine_id, app_name, app_version, user_id, server_status, create_timezone " << "from runs where server_status=0"; int sres = sqlite3_prepare(db, ssc.str().c_str(), -1, &statement, NULL); if(sres==SQLITE_OK) { bool first=true; bool go=true; while(go) { int ret = sqlite3_step(statement); switch(ret) { case SQLITE_BUSY: throw std::logic_error("Unexpected SQLITE_BUSY in sync_runs_with_server"); break; case SQLITE_ROW: { Snoop::AppEntry ae; // FIXME: isolate this in a separate function so we can fetch individual records more easily ae.id = sqlite3_column_int(statement, 0); ae.uuid = safestring(sqlite3_column_text(statement, 1)); ae.create_millis = sqlite3_column_int64(statement, 2); ae.receive_millis = sqlite3_column_int64(statement, 3); ae.pid = sqlite3_column_int(statement, 4); ae.ip_address = safestring(sqlite3_column_text(statement, 5)); ae.machine_id = safestring(sqlite3_column_text(statement, 6)); ae.app_name = safestring(sqlite3_column_text(statement, 7)); ae.app_version = safestring(sqlite3_column_text(statement, 8)); ae.user_id = safestring(sqlite3_column_text(statement, 9)); ae.server_status = sqlite3_column_int(statement, 10); ae.create_timezone = sqlite3_column_int(statement, 11); if(!first) pack << ", \n"; else first=false; pack << ae.to_json(false); break; } case SQLITE_DONE: { go=false; break; } } } } pack << "] }"; sqlite3_finalize(statement); // Before we upload, decrease the server_status flag of all these // rows to indicate that we have started an attempt to get the info // to the server. 
ssc.str(""); ssc << "update runs set server_status=server_status-1 where server_status=0"; sres = sqlite3_prepare(db, ssc.str().c_str(), -1, &statement, NULL); if(sres==SQLITE_OK) { sqlite3_step(statement); sqlite3_finalize(statement); } else { sqlite3_finalize(statement); return; } // Upload to the server. SNOOPDEBUG( std::cerr << "Snoop::sync_logs_with_server: " << pack.str() << std::endl; ) wsclient.send(pack.str()); } void Snoop::sync_logs_with_server(bool from_wsthread) { if(!from_wsthread) if(!connection_is_open) return; std::lock_guard lock(sqlite_mutex); // Create a JSON text with all locally stored entries which have // a server_status field negative or zero. sqlite3_stmt *statement=0; std::ostringstream ssc; std::ostringstream pack; pack << "{ \"log\": ["; ssc << "select log_id, client_log_id, id, create_millis, loc_file, loc_line, loc_method, type, message, server_status, session_uuid, create_timezone " << "from logs where server_status=0"; if(local_types.size()>0) { ssc << " and type not in ("; bool first=true; for(const auto& lt: local_types) { if(first) first=false; else ssc << ", "; // FIXME: sql injection! ssc << "'" << lt << "'"; } ssc << ")"; } int sres = sqlite3_prepare(db, ssc.str().c_str(), -1, &statement, NULL); if(sres==SQLITE_OK) { bool first=true; bool go=true; while(go) { int ret = sqlite3_step(statement); switch(ret) { case SQLITE_BUSY: throw std::logic_error("Unexpected SQLITE_BUSY in sync_runs_with_server"); break; case SQLITE_ROW: { Snoop::LogEntry le; le.log_id = sqlite3_column_int(statement, 0); le.client_log_id = sqlite3_column_int(statement, 1); le.id = sqlite3_column_int(statement, 2); le.uuid = this_app_.uuid; // FIXME: this is wrong, we may still have log entries from a previous run! // See 'sync_payload_with_server' in Snoop.java for the query to handle this properly. 
le.create_millis = sqlite3_column_int64(statement, 3); le.loc_file = safestring(sqlite3_column_text(statement, 4)); le.loc_line = sqlite3_column_int(statement, 5); le.loc_method = safestring(sqlite3_column_text(statement, 6)); le.type = safestring(sqlite3_column_text(statement, 7)); le.message = safestring(sqlite3_column_text(statement, 8)); le.server_status = sqlite3_column_int(statement, 9); le.session_uuid = safestring(sqlite3_column_text(statement, 10)); le.create_timezone = sqlite3_column_int(statement, 11); if(!first) pack << ", \n"; else first=false; pack << le.to_json(false); break; } case SQLITE_DONE: { go=false; break; } } } } pack << "] }"; sqlite3_finalize(statement); // Before we upload, decrease the server_status flag of all these // rows to indicate that we have started an attempt to get the info // to the server. ssc.str(""); ssc << "update logs set server_status=server_status-1 where server_status=0"; sres = sqlite3_prepare(db, ssc.str().c_str(), -1, &statement, NULL); if(sres==SQLITE_OK) { sqlite3_step(statement); sqlite3_finalize(statement); } else { sqlite3_finalize(statement); return; } // Upload to the server. SNOOPDEBUG( std::cerr << "Snoop::sync_logs_with_server: " << pack.str() << std::endl; ) wsclient.send(pack.str()); } bool Snoop::is_connected() const { return connection_is_open; } void Snoop::sync_payloads_with_server(bool from_wsthread) { if(!from_wsthread) if(!connection_is_open) return; std::lock_guard lock(sqlite_mutex); // Create a JSON text with all locally stored entries which have // a server_status field negative or zero. 
// std::cerr << "Syncing payloads" << std::endl; sqlite3_stmt *statement=0; std::ostringstream ssc; std::ostringstream pack; pack << "{ \"payload\": ["; ssc << "select payload_id, client_payload_id, id, create_millis, payload, server_status, create_timezone " << "from payload where server_status=0"; int sres = sqlite3_prepare(payload_db, ssc.str().c_str(), -1, &statement, NULL); if(sres==SQLITE_OK) { bool first=true; bool go=true; while(go) { int ret = sqlite3_step(statement); switch(ret) { case SQLITE_BUSY: throw std::logic_error("Unexpected SQLITE_BUSY in sync_payloads_with_server"); break; case SQLITE_ROW: { Snoop::PayLoad payload; payload.uuid = this_app_.uuid; // FIXME: this is wrong, we may still have log entries from a previous run! payload.payload_id = sqlite3_column_int(statement, 0); payload.client_payload_id= sqlite3_column_int(statement, 1); payload.id = sqlite3_column_int(statement, 2); payload.create_millis = sqlite3_column_int64(statement, 3); payload.payload = safestring(sqlite3_column_text(statement, 4)); payload.server_status = sqlite3_column_int(statement, 5); payload.create_timezone = sqlite3_column_int(statement, 6); if(!first) pack << ", \n"; else first=false; pack << payload.to_json(false); break; } case SQLITE_DONE: { go=false; break; } } } } else { throw std::logic_error("Failed to prepare statement for payload select"); } pack << "] }"; sqlite3_finalize(statement); // Before we upload, decrease the server_status flag of all these // rows to indicate that we have started an attempt to get the info // to the server. ssc.str(""); ssc << "update payload set server_status=server_status-1 where server_status=0"; sres = sqlite3_prepare(payload_db, ssc.str().c_str(), -1, &statement, NULL); if(sres==SQLITE_OK) { sqlite3_step(statement); sqlite3_finalize(statement); } else { sqlite3_finalize(statement); return; } // std::cerr << "Syncing payloads almost done" << std::endl; // Upload to the server. 
wsclient.send(pack.str()); } std::vector Snoop::get_app_registrations(std::string uuid_filter) { Stopwatch sw; sw.start(); std::lock_guard lock(sqlite_mutex); sqlite3_stmt *statement=0; std::ostringstream ssc; ssc << "select id, uuid, create_millis, receive_millis, pid, ip_address, machine_id, " "app_name, app_version, user_id, server_status, create_timezone from runs"; if(uuid_filter.size()>0) ssc << " where uuid=?"; int sres = sqlite3_prepare(db, ssc.str().c_str(), -1, &statement, NULL); if(uuid_filter.size()>0) sqlite3_bind_text(statement, 1, uuid_filter.c_str(), uuid_filter.size(), 0); std::vector entries; if(sres==SQLITE_OK) { bool go=true; while(go) { int ret = sqlite3_step(statement); switch(ret) { case SQLITE_ROW: { Snoop::AppEntry ae; ae.id = sqlite3_column_int(statement, 0); ae.uuid = safestring(sqlite3_column_text(statement, 1)); ae.create_millis = sqlite3_column_int64(statement, 2); ae.receive_millis = sqlite3_column_int64(statement, 3); ae.pid = sqlite3_column_int(statement, 4); ae.ip_address = safestring(sqlite3_column_text(statement, 5)); ae.machine_id = safestring(sqlite3_column_text(statement, 6)); ae.app_name = safestring(sqlite3_column_text(statement, 7)); ae.app_version = safestring(sqlite3_column_text(statement, 8)); ae.user_id = safestring(sqlite3_column_text(statement, 9)); ae.server_status = sqlite3_column_int(statement, 10); ae.create_timezone = sqlite3_column_int(statement, 11); entries.push_back(ae); break; } case SQLITE_DONE: { go=false; break; } } } } else throw std::logic_error("Failed to prepare statement for get_app_registrations"); sqlite3_finalize(statement); sw.stop(); // This is now fast because we have added an index on `uuid`. 
// std::cerr << "get_app_registrations: " << uuid_filter << sw << std::endl; return entries; } void Snoop::on_client_open() { SNOOPDEBUG( std::cerr << "Snoop: connection to " << server_ << " open " << std::this_thread::get_id() << std::endl; ); sync_with_server(true); std::unique_lock lock(connection_mutex); connection_is_open=true; connection_attempt_failed=false; connection_cv.notify_all(); SNOOPDEBUG( std::cerr << "Snoop: synced after open, ready to roll" << std::endl; ); } void Snoop::on_client_fail(const boost::beast::error_code& ec) { // This runs in the I/O thread. std::string error_reason = ec.message(); SNOOPDEBUG( std::cerr << "Snoop: connection failed: " << error_reason << std::endl; ); // Clients may be waiting for the connection to open, but we may // never get to that stage. Signal them to move on. std::unique_lock lock(connection_mutex); connection_attempt_failed=true; connection_cv.notify_all(); // Run the callback bool retry=true; if(error_callback) { retry = error_callback(error_reason); if(authentication_callback) authentication_callback("", false); } // Attempt to re-open. if(retry && this_app_.app_name!="SnoopServer") { SNOOPDEBUG( std::cerr << "Snoop: sleeping a little before attempting reconnect" << std::endl; ); sleep(3); SNOOPDEBUG( std::cerr << "Snoop: attempting to re-open connection" << std::endl; ); try_connect(); } } void Snoop::on_client_close() { // Clients may be waiting for the connection to open, but we may // never get to that stage. Signal them to move on. { std::unique_lock lock(connection_mutex); connection_is_open=false; connection_attempt_failed=true; connection_cv.notify_all(); SNOOPDEBUG( std::cerr << "Snoop: connection closed" << std::endl; ); } // Attempt to re-open. 
if(this_app_.app_name!="SnoopServer") { SNOOPDEBUG( std::cerr << "Snoop: attempting to re-open connection" << std::endl; ); try_connect(); } } void Snoop::on_client_message(const std::string& msg) { nlohmann::json root; // std::cerr << msg->get_payload() << std::endl; try { root=nlohmann::json::parse(msg); } catch(nlohmann::json::exception& ex) { SNOOPDEBUG( std::cerr << "Snoop::on_client_message: Cannot parse LogEntry from JSON: " << ex.what() << std::endl; ); return; } // Determine what type of message this is, and take corresponding action. std::lock_guard lock(sqlite_mutex); try { if(root.count("log_stored")>0) { // Mark all log entries for which we have received confirmation from the // server that they have been stored with 'server_status=1', to prevent // us from re-uploading them again. SNOOPDEBUG( std::cerr << "server ack'ed storing log entries " << root["log_stored"] << std::endl; ); sqlite3_exec(db, "BEGIN TRANSACTION", NULL, NULL, NULL); sqlite3_stmt *statement=0; std::ostringstream ssc; ssc << "update logs set server_status=1 where log_id=?"; int ret = sqlite3_prepare(db, ssc.str().c_str(), -1, &statement, NULL); if(ret!=SQLITE_OK) throw std::logic_error("Failed to prepare statement for on_client_message"); const auto& entries=root["log_stored"]; for(auto entry: entries) { sqlite3_bind_int(statement, 1, entry.get() ); sqlite3_step(statement); sqlite3_reset(statement); } sqlite3_finalize(statement); sqlite3_exec(db, "END TRANSACTION", NULL, NULL, NULL); } if(root.count("app_stored")>0) { // Mark all app entries for which we have received confirmation from the // server that they have been stored with 'server_status=1', to prevent // us from re-uploading them again. 
SNOOPDEBUG( std::cerr << "server ack'ed storing run entries " << root["app_stored"] << std::endl; ); sqlite3_exec(db, "BEGIN TRANSACTION", NULL, NULL, NULL); sqlite3_stmt *statement=0; std::ostringstream ssc; ssc << "update runs set server_status=1 where id=?"; int ret = sqlite3_prepare(db, ssc.str().c_str(), -1, &statement, NULL); if(ret!=SQLITE_OK) throw std::logic_error("Failed to prepare statement for on_client_message"); const auto& entries=root["app_stored"]; for(auto entry: entries) { sqlite3_bind_int(statement, 1, entry.get()); sqlite3_step(statement); sqlite3_reset(statement); } sqlite3_finalize(statement); sqlite3_exec(db, "END TRANSACTION", NULL, NULL, NULL); } if(root.count("payload_stored")>0) { // Mark all payload entries for which we have received confirmation from the // server that they have been stored with 'server_status=1', to prevent // us from re-uploading them again. sqlite3_exec(payload_db, "BEGIN TRANSACTION", NULL, NULL, NULL); sqlite3_stmt *statement=0; std::ostringstream ssc; ssc << "update payload set server_status=1 where id=?"; int ret = sqlite3_prepare(payload_db, ssc.str().c_str(), -1, &statement, NULL); if(ret!=SQLITE_OK) throw std::logic_error("Failed to prepare statement for on_client_message"); const auto& entries=root["payload_stored"]; for(auto entry: entries) { sqlite3_bind_int(statement, 1, entry.get()); sqlite3_step(statement); sqlite3_reset(statement); } sqlite3_finalize(statement); sqlite3_exec(payload_db, "END TRANSACTION", NULL, NULL, NULL); } if(root.count("authenticate")>0) { SNOOPDEBUG( std::cerr << "Received authentication response message from server." 
<< std::endl; ); const auto& auth=root["authenticate"]; std::string ticket_uuid = auth.value("ticket_uuid", ""); bool valid = auth.value("valid", false); if(valid) set_local_ticket(ticket_uuid); else set_local_ticket(""); authentication_callback(ticket_uuid, valid); authentication_callback=nullptr; } } catch(nlohmann::json::exception& ex) { SNOOPDEBUG( std::cerr << "Discarding message, JSON malformed: " << ex.what() << std::endl; ); } } Snoop::~Snoop() { // If a program runs for only a very short time, the connection to the logging // server may not be open yet when we reach this destructor. In that case, // we will want to wait for the connection to open, so that we can do a final // sync before we terminate. // However, if the connection was attempted but failed (e.g. no server), we // can skip all that. // std::cerr << "|" << this_app_.app_name << "|" << std::endl; std::unique_lock lock(connection_mutex); // std::cerr << "unlocked" << std::endl; if(this_app_.app_name!="" && this_app_.app_name!="SnoopServer") { if(connection_is_open==false && connection_attempt_failed==false) { connection_cv.wait(lock); } sync_with_server(); if(db!=0) { // If the database is not open the wsclient won't be running either wsclient.stop(); wsclient_thread.join(); } } if(insert_statement!=0) { sqlite3_finalize(insert_statement); } if(id_for_uuid_statement!=0) { sqlite3_finalize(id_for_uuid_statement); } if(db!=0) { sqlite3_close(db); db=0; } if(payload_db!=0) { sqlite3_close(payload_db); payload_db=0; } } void Snoop::set_local_type(const std::string& t) { local_types.insert(t); } Snoop& Snoop::operator()(const std::string& type, std::string fl, int loc, std::string method) { std::lock_guard lock(call_mutex); assert(this_app_.app_name.size()>0); if(type!="") this_log_.type=type; this_log_.loc_file=fl; this_log_.loc_line=loc; this_log_.loc_method=method; return *this; } Snoop& Snoop::operator<<(const Flush&) { // SNOOPDEBUG(std::cerr << "================= Flush" << std::endl;) 
std::lock_guard lock(call_mutex); assert(this_app_.app_name.size()>0); // Fill in the remaining fields of the LogEntry to be stored/sent. auto duration = std::chrono::system_clock::now().time_since_epoch(); auto millis = std::chrono::duration_cast(duration).count(); this_log_.create_millis = millis; this_log_.create_timezone = local_utc_offset_minutes(); this_log_.message = out_.str(); this_log_.server_status = 0; this_log_.session_uuid = session_uuid_; store_log_entry(this_log_, false); if(sync_immediately_) sync_logs_with_server(); out_.str(""); this_log_.loc_file=""; this_log_.loc_line=-1; this_log_.loc_method=""; this_log_.type=""; this_log_.message=""; return *this; } Snoop& Snoop::payload(const std::vector& data) { std::lock_guard lock(call_mutex); assert(this_app_.app_name.size()>0); // Fill in the remaining fields of the PayLoad to be stored/sent. Snoop::PayLoad pl(data); auto duration = std::chrono::system_clock::now().time_since_epoch(); auto millis = std::chrono::duration_cast(duration).count(); pl.create_millis = millis; pl.create_timezone = local_utc_offset_minutes(); pl.server_status = 0; store_payload_entry(pl); sync_payloads_with_server(); return *this; } Snoop::LogEntry::LogEntry() : log_id(0), client_log_id(0), id(0), create_millis(0), receive_millis(0), loc_line(0), server_status(0), create_timezone(-1) { } Snoop::LogEntry::LogEntry(int log_id_, int client_log_id_, int c1, const std::string& c1b, uint64_t c2, uint64_t c2b, const std::string& c3, int c4, const std::string& c5, const std::string& c6, const std::string& c7, int c8, const std::string& c9, int create_timezone_) : log_id(log_id_), client_log_id(client_log_id_), id(c1), uuid(c1b), create_millis(c2), receive_millis(c2b), loc_file(c3), loc_line(c4), loc_method(c5), type(c6), message(c7), server_status(c8), session_uuid(c9), create_timezone(create_timezone_) { } std::string Snoop::LogEntry::to_json(bool human_readable) const { nlohmann::json json; json["log_id"]=log_id; 
json["client_log_id"]=client_log_id; json["id"]=id; json["uuid"]=uuid; if(human_readable) { time_t tt = std::chrono::system_clock::to_time_t(u64_to_time(create_millis)); tm utc_tm = *localtime(&tt); std::ostringstream str; str << std::setfill('0') << std::setw(2) << utc_tm.tm_hour << ":" << std::setw(2) << utc_tm.tm_min << ":" << std::setw(2) << utc_tm.tm_sec; json["time"]=str.str(); str.str(""); str << std::setw(2) << utc_tm.tm_mday << "/" << std::setw(2) << utc_tm.tm_mon+1 << "/" << std::setw(4) << utc_tm.tm_year+1900; json["date"]=str.str(); } json["create_millis"]=create_millis; json["create_timezone"]=create_timezone; json["receive_millis"]=receive_millis; json["loc_file"]=loc_file; json["loc_line"]=loc_line; json["loc_method"]=loc_method; json["type"]=type; json["message"]=message; json["server_status"]=server_status; json["session_uuid"]=session_uuid; std::ostringstream str; str << json; // SNOOPDEBUG( std::cerr << "Snoop::LogEntry::to_json: " << str.str() << std::endl; ) return str.str(); } Snoop::AppEntry::AppEntry() : id(0), create_millis(0), receive_millis(0), pid(0), server_status(0), connected(false), create_timezone(-1) { } Snoop::AppEntry::AppEntry(const std::string& uuid_, uint64_t create_millis_, uint64_t receive_millis_, uint64_t pid_, const std::string& ip_address_, const std::string& machine_id_, const std::string& app_name_, const std::string& app_version_, const std::string& user_id_, int server_status_, int create_timezone_) : uuid(uuid_), create_millis(create_millis_), receive_millis(receive_millis_), pid(pid_), ip_address(ip_address_), machine_id(machine_id_), app_name(app_name_), app_version(app_version_), user_id(user_id_), server_status(server_status_), create_timezone(create_timezone_) { } std::string Snoop::AppEntry::to_json(bool human_readable) const { std::ostringstream str; str << "{ \"id\": " << id << ", \"uuid\": \"" << uuid << "\""; if(human_readable) { time_t tt = std::chrono::system_clock::to_time_t(u64_to_time(create_millis)); 
		// (body of AppEntry::to_json — started in previous chunk)
		tm utc_tm = *localtime(&tt);
		str << ", \"time\": \"" << std::setfill('0')
		    << std::setw(2) << utc_tm.tm_hour << ":"
		    << std::setw(2) << utc_tm.tm_min << ":"
		    << std::setw(2) << utc_tm.tm_sec << "\""
		    << ", \"date\": \""
		    << std::setw(2) << utc_tm.tm_mday << "/"
		    << std::setw(2) << utc_tm.tm_mon+1 << "/"
		    << std::setw(4) << utc_tm.tm_year+1900 << "\"";
		}
	str << ", \"create_millis\": " << create_millis
	    << ", \"create_timezone\": " << create_timezone
	    << ", \"receive_millis\": " << receive_millis
	    << ", \"pid\": " << pid
	    << ", \"ip_address\": \"" << ip_address << "\""
	    << ", \"machine_id\": \"" << machine_id << "\""
	    << ", \"app_name\": \"" << app_name << "\""
	    << ", \"app_version\": \"" << app_version << "\""
	    << ", \"user_id\": \"" << user_id << "\""
	    << ", \"server_status\": " << server_status
	    << ", \"connected\": " << connected
	    << "}";
	return str.str();
	}

// Default PayLoad: -1 marks "no client-side id yet"; receive time stamped now.
// NOTE(review): duration_cast and several other template argument lists in
// this chunk were stripped by extraction — confirm against upstream.
Snoop::PayLoad::PayLoad()
	{
	client_payload_id=-1;
	auto duration = std::chrono::system_clock::now().time_since_epoch();
	receive_millis = std::chrono::duration_cast(duration).count();
	}

// Construct a PayLoad from raw bytes; data is stored base64-encoded.
Snoop::PayLoad::PayLoad(const std::vector& data)
	: PayLoad()
	{
	payload=snoop_base64_encode(reinterpret_cast(data.data()), data.size());
	}

// Serialise this PayLoad to JSON (the bool "human_readable" flag is unused).
std::string Snoop::PayLoad::to_json(bool) const
	{
	nlohmann::json json;
	json["payload_id"]=payload_id;
	json["id"]=id;
	json["create_millis"]=create_millis;
	json["create_timezone"]=create_timezone;
	json["receive_millis"]=receive_millis;
	json["payload"]=payload;
	json["server_status"]=server_status;
	json["uuid"]=uuid;
	std::ostringstream str;
	str << json;
	return str.str();
	}

// Populate this LogEntry from a JSON object received on the wire.
// Throws ParseError when mandatory keys are missing; optional keys fall
// back to defaults via json::value().
void Snoop::LogEntry::from_json(const nlohmann::json& entry)
	{
	// std::cerr << "logentry: " << entry << std::endl;
	if(entry.count("log_id")==0 || entry.count("id")==0 || entry.count("uuid")==0
	   || entry.count("type")==0 || entry.count("message")==0) {
		throw ParseError("Incomplete log entry");
		}
	try {
		log_id = entry["log_id"].get();
		id = entry["id"].get();
		uuid = entry["uuid"].get();
		if(entry.count("create_millis")>0)
			create_millis = entry["create_millis"].get();
		loc_file = entry.value("loc_file", "");
		loc_line = entry.value("loc_line", 0);
		loc_method = entry.value("loc_method", "");
		type = entry["type"].get();
		message = entry.value("message", "");
		server_status = entry.value("server_status", 0);
		session_uuid = entry.value("session_uuid", "");
		create_timezone = entry.value("create_timezone", -1);
		}
	catch(nlohmann::json::exception& ex) {
		SNOOPDEBUG( std::cerr << "Snoop::LogEntry::from_json: " << ex.what() << std::endl; );
		}
	// std::cerr << "LogEntry parsed" << std::endl;
	}

// Populate this AppEntry from a JSON object received on the wire.  Unlike
// LogEntry::from_json there is no up-front key check; a missing mandatory
// key surfaces as a caught json::exception and leaves fields half-filled.
void Snoop::AppEntry::from_json(const nlohmann::json& entry)
	{
	// std::cerr << entry.dump() << std::endl;
	try {
		id = entry["id"].get();
		uuid = entry["uuid"].get();
		create_millis = entry["create_millis"].get();
		if(entry.count("pid")>0)
			pid = entry["pid"].get();
		if(entry.count("ip_address")>0)
			ip_address = entry["ip_address"].get();
		machine_id = entry["machine_id"].get();
		app_name = entry["app_name"].get();
		app_version = entry["app_version"].get();
		if(entry.count("user_id")>0)
			user_id = entry["user_id"].get();
		if(entry.count("server_status")>0)
			server_status = entry["server_status"].get();
		create_timezone = entry.value("create_timezone", -1);
		}
	catch(nlohmann::json::exception& ex) {
		SNOOPDEBUG( std::cerr << "Snoop::AppEntry::from_json: " << ex.what() << std::endl; );
		}
	// std::cerr << "AppEntry parsed" << std::endl;
	}

// Populate this PayLoad from a JSON object received on the wire.
void Snoop::PayLoad::from_json(const nlohmann::json& entry)
	{
	try {
		payload_id = entry["payload_id"].get();
		id = entry["id"].get();
		create_millis = entry["create_millis"].get();
		payload = entry["payload"].get();
		server_status = entry["server_status"].get();
		create_timezone = entry.value("create_timezone", -1);
		}
	catch(nlohmann::json::exception& ex) {
		// FIXME(review): copy-paste — this tag should read "PayLoad::from_json".
		SNOOPDEBUG( std::cerr << "Snoop::LogEntry::from_json: " << ex.what() << std::endl; );
		}
	}

// Insert (username, password) into the auth database; with ENCRYPT_PASSWORDS
// the password is scrypt-hashed first.  When `single` is true, the users
// table is emptied first so only one record ever exists.
bool Snoop::add_user(std::string user, std::string password, bool single)
	{
	std::lock_guard lock(sqlite_mutex);
	assert(auth_db!=0);

#ifdef ENCRYPT_PASSWORDS
	char outbuf[SCRYPT_MCF_LEN+1];
	// std::cerr << SCRYPT_N << ", " << SCRYPT_r << ", " << SCRYPT_p << ", " << SCRYPT_MCF_LEN << std::endl;
	libscrypt_hash(outbuf, password.c_str(), SCRYPT_N, SCRYPT_r, SCRYPT_p);
#else
	const char *outbuf = password.c_str();
#endif

	// If `single`, flush the database.
	if(single) {
		int res = sqlite3_exec(auth_db, "delete from users", NULL, NULL, NULL);
		if(res!=SQLITE_OK) {
			SNOOPDEBUG( std::cerr << "Snoop::add_user: problem erasing users table." << std::endl; )
			}
		}

	sqlite3_stmt *statement=0;
	std::ostringstream ss;
	ss << "insert into users (username, password) values (?,?)";
	int res = sqlite3_prepare(auth_db, ss.str().c_str(), -1, &statement, NULL);
	assert(res==SQLITE_OK);
	sqlite3_bind_text(statement, 1, user.c_str(), user.size(), 0);
	sqlite3_bind_text(statement, 2, outbuf, strlen(outbuf), 0);
	res=sqlite3_step(statement);
	sqlite3_finalize(statement);
	if(res!=SQLITE_DONE) {
		// FIXME(review): the statement ran on auth_db, but the error message is
		// fetched from `db` — should be sqlite3_errmsg(auth_db).
		SNOOPDEBUG( std::cerr << "Snoop::add_user: error " << sqlite3_errmsg(db) << std::endl; )
		return false;
		}
	return true;
	}

// Return the single locally stored username, or "" when none is set.
std::string Snoop::local_user() const
	{
	std::string name="";
	sqlite3_stmt *statement=0;
	std::ostringstream ss;
	// NOTE(review): double quotes in SQL denote identifiers; SQLite only
	// accepts "" as a string here by lenient fallback — prefer ''.
	int res = sqlite3_prepare(auth_db, "select username from users where username!=\"\"", -1, &statement, NULL);
	if(res!=SQLITE_OK)
		return "";
	int ret = sqlite3_step(statement);
	if(ret==SQLITE_ROW) {
		name = safestring(sqlite3_column_text(statement, 0));
		SNOOPDEBUG( std::cerr << "Snoop::init: localUser = " << name << std::endl; );
		}
	else {
		SNOOPDEBUG( std::cerr << "Snoop::init: no localUser set (yet)" << std::endl; );
		}
	sqlite3_finalize(statement);
	return name;
	}

// Standard base64 alphabet used by the encoder/decoder below.
static const std::string base64_chars =
	"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
	"abcdefghijklmnopqrstuvwxyz"
	"0123456789+/";

// True when `c` is a character that can appear in base64 data (not '=').
static inline bool is_base64(unsigned char c)
	{
	return (isalnum(c) || (c == '+') || (c == '/'));
	}

// Base64-encode `in_len` bytes.  Classic 3-bytes-to-4-chars scheme with
// '=' padding for a trailing partial group.
std::string snoop_base64_encode(unsigned char const* bytes_to_encode, unsigned int in_len)
	{
	std::string ret;
	int i = 0;
	int j = 0;
	unsigned char char_array_3[3];
	unsigned char char_array_4[4];

	while (in_len--) {
		char_array_3[i++] = *(bytes_to_encode++);
		if (i == 3) {
			// Split 24 input bits over four 6-bit output indices.
			char_array_4[0] = (char_array_3[0] & 0xfc) >> 2;
			char_array_4[1] = ((char_array_3[0] & 0x03) << 4) + ((char_array_3[1] & 0xf0) >> 4);
			char_array_4[2] = ((char_array_3[1] & 0x0f) << 2) + ((char_array_3[2] & 0xc0) >> 6);
			char_array_4[3] = char_array_3[2] & 0x3f;

			for(i = 0; (i <4) ; i++)
				ret += base64_chars[char_array_4[i]];
			i = 0;
			}
		}

	if (i) {
		// Pad the final 1- or 2-byte group with zero bytes, emit i+1 chars,
		// then fill the rest of the quartet with '='.
		for(j = i; j < 3; j++)
			char_array_3[j] = '\0';

		char_array_4[0] = (char_array_3[0] & 0xfc) >> 2;
		char_array_4[1] = ((char_array_3[0] & 0x03) << 4) + ((char_array_3[1] & 0xf0) >> 4);
		char_array_4[2] = ((char_array_3[1] & 0x0f) << 2) + ((char_array_3[2] & 0xc0) >> 6);
		char_array_4[3] = char_array_3[2] & 0x3f;

		for (j = 0; (j < i + 1); j++)
			ret += base64_chars[char_array_4[j]];

		while((i++ < 3))
			ret += '=';
		}

	return ret;
	}

// Decode a base64 string; stops at the first '=' or non-base64 character.
std::string snoop_base64_decode(std::string const& encoded_string)
	{
	int in_len = encoded_string.size();
	int i = 0;
	int j = 0;
	int in_ = 0;
	unsigned char char_array_4[4], char_array_3[3];
	std::string ret;

	while (in_len-- && ( encoded_string[in_] != '=') && is_base64(encoded_string[in_])) {
		char_array_4[i++] = encoded_string[in_];
		in_++;
		if (i ==4) {
			// Map four base64 characters back to their 6-bit values, then
			// reassemble three output bytes.
			for (i = 0; i <4; i++)
				char_array_4[i] = base64_chars.find(char_array_4[i]);

			char_array_3[0] = (char_array_4[0] << 2) + ((char_array_4[1] & 0x30) >> 4);
			char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
			char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];

			for (i = 0; (i < 3); i++)
				ret += char_array_3[i];
			i = 0;
			}
		}

	if (i) {
		// Handle the trailing partial quartet (i-1 output bytes).
		for (j = i; j <4; j++)
			char_array_4[j] = 0;

		for (j = 0; j <4; j++)
			char_array_4[j] = base64_chars.find(char_array_4[j]);

		char_array_3[0] = (char_array_4[0] << 2) + ((char_array_4[1] & 0x30) >> 4);
		char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
		char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];

		for (j = 0; (j < i - 1); j++)
			ret += char_array_3[j];
		}

	return ret;
	}
================================================
FILE: client_server/Snoop.hh
================================================
/* Snoop

   Copyright (C) 2015-2024  Kasper Peeters
   Available under the terms of the GPL v3.

   Snoop is a lightweight logging library which stores its log entries in
   a local SQLite database or on a remote server.
*/

#pragma once

#ifndef __ANDROID__
  // Android uses Java/Kotlin networking, don't attempt to do it native.
  #define SNOOP_SSL
#endif

// NOTE(review): the header names of the following includes (and template
// argument lists further down) were stripped by extraction; restore from
// the upstream file.
#include
#include
#include
#include
#include
#include "nlohmann/json.hpp"
#include
#include
#include
#include "websocket_client.hh"
#ifndef _MSC_VER
  #include
#endif

std::string safestring(const unsigned char *c);

namespace snoop {

	class SnoopImpl;
	// Sentinel type streamed to commit the current log entry (see snoop::flush).
	class Flush {};
	extern Flush flush;

   /// Logging class with functionality to send log information to a
   /// remote server using a websocket connection.

   class Snoop {
		public:
			Snoop();
			~Snoop();

			/// Initialise the logging stream. Should be called once at
			/// program startup, but can be called multiple times without
			/// causing problems.
			void init(const std::string& app_name, const std::string& app_version,
			          std::string server="", std::string local_log_file="", std::string machine_id="");

			/// Get a string which uniquely identifies the current user. This is
			/// stored in ~/.config/snoop/appname.conf, and in the 'user_id' field
			/// in each LogEntry. Note that this is different from the 'uuid' field,
			/// which will change from one run to the next.
			std::string get_user_uuid(const std::string& app_name);

			/// Operator to initialise a logging entry with the type of
			/// the log message as well as (optionally) the file, line
			/// number and method.
			Snoop& operator()(const std::string& type="", std::string fl="", int loc=-1, std::string method="");

			/// Determine the 'type' field of records which should not be
			/// sent to the remote logging server. Can be called multiple times.
			void set_local_type(const std::string& type);

			/// Generic operator to log an object to the log message being constructed.
			/// NOTE(review): the template header lost its parameter list
			/// (presumably `template<class T>`) to extraction.
			template Snoop& operator<<(const T& obj)
				{
				out_ <<(obj);
				return *this;
				}

			/// Log payload data.
			Snoop& payload(const std::vector&);

			/// Flush the log entry to disk/server.
			Snoop& operator<<(const Flush&);

			/// Set to sync with server after every log line.
			void set_sync_immediately(bool);

			/// Ensure that the local database is synchronised with the
			/// server (this sends multiple app or log entries in one
			/// websocket message). Leave the bool argument at its
			/// default argument under all normal circumstances.
			void sync_with_server(bool from_wsthread=false);

			/// As above, but only for run entries.
			void sync_runs_with_server(bool from_wsthread=false);

			/// As above, but only for log entries.
			void sync_logs_with_server(bool from_wsthread=false);

			/// As above, but only for payload data.
			void sync_payloads_with_server(bool from_wsthread=false);

			/// Are we connected to the log server?
			bool is_connected() const;

			/// Return version of last run seen on given device.
			std::string last_seen_version(std::string machine_id);

			/// Authentication logic; passes ticket or credentials
			/// to server, and registers callback function for when
			/// the response comes back. If the `always_reauth` flag
			/// is true, always ask the server for a new ticket using
			/// the user/password combo.
			/// NOTE(review): std::function parameter lists stripped by extraction.
			bool authenticate(std::function, std::string user="", std::string pass="", bool always_reauth=false);

			/// Set error handler for clients; will be called on
			/// any networking errors. When this function returns
			/// false, no further attempts will be made to connect
			/// or send, and if authentication was in progress, a
			/// final call to the authentication callback will be
			/// made, with 'false' as argument.
			void set_error_handler(std::function);

			/// Exception used to flag invalid/unparseable data received on the wire.
			class ParseError : public std::logic_error {
				public:
					ParseError(const std::string&);
			};

			/// Get status of a given authentication ticket.
			class Ticket {
				public:
					Ticket();
					int         ticket_id;
					int         user_id;
					std::string ticket_uuid;
					bool        valid;
			};
			Ticket is_ticket_valid(std::string ticket_uuid);

			/// C++ representation of a run entry.
			class AppEntry {
				public:
					AppEntry();
					AppEntry(const std::string& uuid_, uint64_t create_millis_, uint64_t receive_millis_, uint64_t pid_,
					         const std::string& ip_address_, const std::string& machine_id_,
					         const std::string& app_name_,   const std::string& app_version_,
					         const std::string& user_id_, int server_status_, int create_timezone);

					std::string to_json(bool human_readable) const;
					void        from_json(const nlohmann::json&);

					int         id;
					std::string uuid;
					uint64_t    create_millis;
					uint64_t    receive_millis;
					uint64_t    pid;
					std::string ip_address;
					std::string machine_id;
					std::string app_name;
					std::string app_version;
					std::string user_id;
					int         server_status; // 1: synced, 0 and negative: number of attempts at syncing made
					bool        connected;
					int         create_timezone;
			};

			/// C++ representation of a log entry.
			class LogEntry {
				public:
					LogEntry();
					LogEntry(int log_id_, int client_log_id_, int id_, const std::string&, uint64_t, uint64_t,
					         const std::string&, int, const std::string&, const std::string& , const std::string&,
					         int status, const std::string&, int create_timezone);

					std::string to_json(bool human_readable) const;
					void        from_json(const nlohmann::json&);

					int         log_id;
					int         client_log_id;
					int         id;
					std::string uuid; // this goes on the wire, but is not stored on disk.
					uint64_t    create_millis;
					uint64_t    receive_millis;
					std::string loc_file;
					int         loc_line;
					std::string loc_method;
					std::string type;
					std::string message;
					int         server_status; // 1: synced, 0 and negative: number of attempts at syncing made
					std::string session_uuid;
					int         create_timezone;
			};

			/// C++ representation of a payload entry.
			class PayLoad {
				public:
					PayLoad();
					PayLoad(const std::vector& data);

					std::string to_json(bool human_readable) const;
					void        from_json(const nlohmann::json&);

					int         payload_id;
					int         client_payload_id;
					int         id;
					std::string uuid; // this goes on the wire, but is not stored on disk.
					uint64_t    create_millis;
					uint64_t    receive_millis;
					std::string payload;
					int         server_status; // 1: synced, 0 and negative: number of attempts at syncing made
					int         create_timezone;
			};

			/// Client-side fetching of ticket.
			std::string get_local_ticket();

			/// Retrieve the (single) local username from the database, empty string
			/// if no such row exists.
			std::string local_user() const;

			/// Set the session uuid so log entries can be easily grouped by session.
			void set_session_uuid(const std::string&);

			/// Add a user/password combo to the user database. This can also be used
			/// locally (client mode, not server) to save a username to persistent
			/// storage. If `single` is true, first flush the local user database (so
			/// at any time only one record is present).
			bool add_user(std::string user, std::string password, bool single=false);

		protected:
			/// Start the websocket client. This tries to connect to the server and then
			/// waits in a separate thread until the server sends us something (typically
			/// in response to something the main thread makes by calling wsclient.send).
			void start_websocket_client();

			/// Ensure that the required tables are present in the
			/// database file.
			void create_tables();

			/// Ensure that the required authentication tables are present
			/// in the authentication database. Only used on the server.
			void create_authentication_tables();

			/// Obtain a uuid by finding the last AppEntry stored in the
			/// local database. Will attempt to re-turn a previously
			/// generated uuid but will do so only if one is stored for
			/// the current pid; if no entry with the current pid is
			/// stored then a new one will always be generated.
			void obtain_uuid();

			/// Store an app entry in the database. Will update the 'id'
			/// field in the AppEntry.
			bool store_app_entry(Snoop::AppEntry&);
			bool store_app_entry_without_lock(Snoop::AppEntry&);

			/// Store a log entry in the local database. Generates its
			/// own receive_millis field (the one given gets
			/// overwritten). Will update the 'id' field in the LogEntry.
			/// Returns 'true' if the entry was stored, or 'false' if an
			/// entry with this client_log_id was already present (except
			/// when it is 0).
			bool store_log_entry(Snoop::LogEntry&, bool avoid_server_duplicates);

			/// Store payload data in the local database.
			bool store_payload_entry(Snoop::PayLoad&);

			/// Store an attempt to login into the authentication database.
			bool store_auth_attempt_entry(int user_id, int ticket_id, int valid, std::string msg);

			/// Return a vector of all aps registered in the database. If
			/// the uuid filter is non-empty, will filter on the given
			/// uuid.
			std::vector get_app_registrations(std::string uuid_filter="");

			/// Store an authentication ticket in the database.
			int store_ticket(std::string ticket_uuid, int user_id, bool valid);

			/// Client-side storing of ticket (simpler than store_ticket above).
			/// If ticket is empty, only deletes current ticket.
			void set_local_ticket(std::string ticket_uuid);

			/// Variables
			bool          sync_immediately_;
			sqlite3      *db, *payload_db, *auth_db;
			sqlite3_stmt *insert_statement, *id_for_uuid_statement, *payload_insert_statement, *testq_statement;
			std::recursive_mutex sqlite_mutex;

		private:
			/// Websocket client to talk to a remote logging server.
			websocket_client wsclient;
			std::thread      wsclient_thread;

			std::mutex              connection_mutex;
			std::condition_variable connection_cv;
			bool                    connection_is_open, connection_attempt_failed;

			// Main entry point for the I/O thread.
			void io_thread_run();
			void try_connect();
			void on_client_open();
			void on_client_fail(const boost::beast::error_code& ec);
			void on_client_close();
			void on_client_message(const std::string& msg);

			std::ostringstream out_;
			Snoop::AppEntry    this_app_;
			Snoop::LogEntry    this_log_;
			std::string        server_;
			std::string        session_uuid_; // gets copied into every LogEntry.

			std::recursive_mutex call_mutex;
			std::set             local_types;

			std::function authentication_callback;
			std::function error_callback;
	};

	extern Snoop log;

	const char info[] ="info";
	const char warn[] ="warning";
	const char error[]="error";
	const char fatal[]="fatal";
	const char email[]="email";
}

// set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D__FILENAME__='\"$(subst
// ${CMAKE_SOURCE_DIR}/,,$(abspath $<))\"'")

#define LOC  __FILE__, __LINE__, __func__
================================================
FILE: client_server/TODO
================================================
Can it ever happen that e.g. the user selects a cell, clicks delete, but
before the Action can be 'perform'ed, the client has already removed it?
The problem is that the user interface can fire off a Delete action and
then happily request other things to that cell before the delete has
happened. Or can it? With calling perform, you always run on the gui
thread. This thread should just execute all actions immediately, all
the way down to the gui update.

LATER: It's worse: you can set a cell to execute, then delete it while
the cell is still running. When the return data comes in and the
computethread wants to add the child output cell, the original one has
gone and the pointer is stale => segfault. You can't avoid this with
doing actions in sync. You can prevent it by requiring that a delete
can only happen on cells which are not executing.

-> maybe the best is to make 'perform' execute all the way through, and
   make a separate member, only accessible to the client, which does
   actions by waking.
perform_sync(); perform_async(); -> No, let the GUI thread do all updates. The client can only request updates, the gui thread executes them. Then it becomes easy to force the gui to first run the outstanding requests before doing its own. HOWEVER, in this case we have to worry about how the client stores ActionBase. The GUIThread holds the action stack, but the NetThread can lock it and write into it. Make sure to keep the GUIBase and Client classes separating the thread actions. Some stuff should probably move out from Client -> GUIBase. Also: rename to GUIThread and NetThread. Or maybe add SharedData to separate it out even cleaner. * We are passing around all data in the form of shared ptrs, but that ignores the tree structure. Perhaps change this? ================================================ FILE: client_server/cadabra-jupyter-kernel.cc ================================================ #include "Config.hh" #include #include "cadabra-jupyter-kernel.hh" #include "xeus/xguid.hpp" #include // #define DEBUG 1 cadabra::CadabraJupyter::CadabraJupyter() : Server() { runner = std::thread(std::bind(&Server::wait_for_job, this)); pybind11::gil_scoped_release release; } void cadabra::CadabraJupyter::configure_impl() { auto handle_comm_opened = [](xeus::xcomm&& comm, const xeus::xmessage&) { std::cerr << "Comm opened for target: " << comm.target().name() << std::endl; }; comm_manager().register_comm_target("echo_target", handle_comm_opened); // using function_type = std::function; #ifdef DEBUG std::cerr << "CadabraJupyter configured" << std::endl; #endif } xjson cadabra::CadabraJupyter::execute_request_impl(int execution_counter, const std::string& code, bool silent, bool store_history, xjson /* user_expressions */, bool allow_stdin) { #ifdef DEBUG std::cerr << "Received execute_request" << std::endl; std::cerr << "execution_counter: " << execution_counter << std::endl; std::cerr << "code: " << code << std::endl; std::cerr << "silent: " << silent << std::endl; std::cerr 
<< "store_history: " << store_history << std::endl;
	std::cerr << "allow_stdin: " << allow_stdin << std::endl;
	std::cerr << std::endl;
#endif
	// Queue the code block for the server thread; the websocketpp
	// connection handle is unused in the Jupyter kernel.
	// NOTE(review): the template argument of std::unique_lock (presumably
	// std::mutex) was stripped by the extraction — verify upstream.
	std::unique_lock lock(block_available_mutex);
	websocketpp::connection_hdl hdl;
	block_queue.push(Block(hdl, code, execution_counter));
	block_available.notify_one();

	// The 'wait_for_job' function which runs in a separate thread will take
	// care of executing the 'code'. If anything in 'code' uses 'display',
	// it will run the 'send' function below. At the end of the code
	// execution, a final output block will be sent by 'Server::on_block_finished'.

	xjson result;
	result["status"] = "ok";
	return result;
	}

// Report a block execution error back to the Jupyter frontend.
void cadabra::CadabraJupyter::on_block_error(Block blk)
	{
#ifdef DEBUG
	std::cerr << "error: " << blk.error << std::endl;
#endif
	// NOTE(review): element type of std::vector (presumably std::string)
	// was stripped by the extraction — verify upstream.
	std::vector traceback;
	// FIXME: This does not show the error, for some reason...
	publish_execution_error("Exception", blk.error, traceback);

	xjson pub_data;
	pub_data["text/markdown"] = blk.error;
	// xjson extra_data;
	// extra_data["dummy"] = "dummy";
	// FIXME: ... so we send it again as a message.
	publish_execution_result(current_id, std::move(pub_data), 0); // std::move(extra_data));
	}

// Forward output produced by the Cadabra server to the Jupyter frontend
// as markdown execution results; LaTeX dmath* delimiters are rewritten
// to inline-math '$' so markdown renderers display them.
uint64_t cadabra::CadabraJupyter::send(const std::string& output, const std::string& msg_type, uint64_t parent_id, bool last)
	{
#ifdef DEBUG
	std::cerr << "received: " << msg_type << " " << output << std::endl;
#endif
	if(output.size()>0) {
		if(msg_type=="verbatim" || msg_type=="output") {
			xjson pub_data;
			pub_data["text/markdown"] = output;
			// xjson extra_data;
			// extra_data["dummy"] = "dummy";
			publish_execution_result(current_id, std::move(pub_data), 0); // std::move(extra_data));
			}
		else if(msg_type=="latex_view") {
			xjson pub_data;
			std::string tmp=output;
			boost::replace_all(tmp, "\\begin{dmath*}", "$");
			boost::replace_all(tmp, "\\end{dmath*}", "$");
			pub_data["text/markdown"] = tmp;
			// xjson extra_data;
			// extra_data["dummy"] = "dummy";
			publish_execution_result(current_id, std::move(pub_data), 0); // std::move(extra_data));
			}
		}
	return current_id;
	}

// Tab-completion request; currently returns a hard-wired placeholder match.
xjson cadabra::CadabraJupyter::complete_request_impl(const std::string& code, int cursor_pos)
	{
#ifdef DEBUG
	std::cerr << "Received complete_request" << std::endl;
	std::cerr << "code: " << code << std::endl;
	std::cerr << "cursor_pos: " << cursor_pos << std::endl;
	std::cerr << std::endl;
#endif
	xjson result;
	result["status"] = "ok";
	result["matches"] = {"a.echo1"};
	result["cursor_start"] = 2;
	result["cursor_end"] = 6;
	return result;
	}

// Object-inspection request; always reports 'not found'.
xjson cadabra::CadabraJupyter::inspect_request_impl(const std::string& code, int cursor_pos, int detail_level)
	{
#ifdef DEBUG
	std::cerr << "Received inspect_request" << std::endl;
	std::cerr << "code: " << code << std::endl;
	std::cerr << "cursor_pos: " << cursor_pos << std::endl;
	std::cerr << "detail_level: " << detail_level << std::endl;
	std::cerr << std::endl;
#endif
	xjson result;
	result["status"] = "ok";
	result["found"] = false;
	return result;
	}

// Completeness check for console frontends; always reports 'complete'.
xjson cadabra::CadabraJupyter::is_complete_request_impl(const std::string& code)
	{
#ifdef DEBUG
	std::cerr << "Received is_complete_request" << std::endl;
	std::cerr << "code: " << code << std::endl;
	std::cerr << std::endl;
#endif
	xjson result;
	result["status"] = "complete";
	return result;
	}

// Kernel metadata reported to Jupyter (language name, version, mimetype).
xjson cadabra::CadabraJupyter::kernel_info_request_impl()
	{
	xjson result;
	result["implementation"] = "Cadabra";
	result["implementation_version"] = std::string(CADABRA_VERSION_SEM);
	result["language_info"]["name"] = "cadabra";
	result["language_info"]["version"] = "2.0.0";
	result["language_info"]["mimetype"] = "text/cadabra";
	result["language_info"]["file_extension"] = ".cdb";
	return result;
	}

void cadabra::CadabraJupyter::shutdown_request_impl()
	{}


================================================
FILE: client_server/cadabra-jupyter-kernel.hh
================================================
#pragma once

#include "xeus/xinterpreter.hpp"
#include "xeus/xjson.hpp"
#include "Server.hh"

using xeus::xinterpreter;
using xeus::xjson;

namespace cadabra {

	/// \ingroup clientserver
	///
	/// A Jupyter kernel for Cadabra, which provides the Cadabra
	/// pre-processor to enable input as in the Gtk notebook frontend.
	/// Built using Xeus.
class CadabraJupyter : public xinterpreter, public Server { public: CadabraJupyter(); virtual ~CadabraJupyter() = default; virtual uint64_t send(const std::string& output, const std::string& msg_type, uint64_t parent_id, bool last) override; int current_counter; bool finished=true; protected: virtual void on_block_error(Block) override; private: void configure_impl() override; xjson execute_request_impl(int execution_counter, const std::string& code, bool silent, bool store_history, xjson user_expressions, bool allow_stdin) override; xjson complete_request_impl(const std::string& code, int cursor_pos) override; xjson inspect_request_impl(const std::string& code, int cursor_pos, int detail_level) override; xjson is_complete_request_impl(const std::string& code) override; xjson kernel_info_request_impl() override; void shutdown_request_impl() override; }; } ================================================ FILE: client_server/cadabra-jupyter-main.cc ================================================ #include #include #include "cadabra-jupyter-kernel.hh" #include "xeus/xkernel.hpp" #include "xeus/xkernel_configuration.hpp" int main(int argc, char* argv[]) { std::string file_name = (argc == 1) ? 
"connection.json" : argv[2]; xeus::xconfiguration config = xeus::load_configuration(file_name); using interpreter_ptr = std::unique_ptr; interpreter_ptr interpreter = interpreter_ptr(new cadabra::CadabraJupyter()); xeus::xkernel kernel(config, "kpeeters", std::move(interpreter)); std::cout << "starting kernel" << std::endl; kernel.start(); return 0; } ================================================ FILE: client_server/cadabra-server.cc ================================================ #include "Config.hh" #include "Snoop.hh" #include "Server.hh" #include #define NDEBUG 1 #ifdef _WIN32 #include #endif #ifdef _WIN32 std::string getRegKey(const std::string& location, const std::string& name, bool system) { HKEY key; TCHAR value[1024]; DWORD bufLen = 1024*sizeof(TCHAR); long ret; ret = RegOpenKeyExA(system?HKEY_LOCAL_MACHINE:HKEY_CURRENT_USER, location.c_str(), 0, KEY_QUERY_VALUE, &key); if( ret != ERROR_SUCCESS ){ return std::string(); } ret = RegQueryValueExA(key, name.c_str(), 0, 0, (LPBYTE) value, &bufLen); RegCloseKey(key); if ( (ret != ERROR_SUCCESS) || (bufLen > 1024*sizeof(TCHAR)) ){ return std::string(); } std::string stringValue = std::string(value, (size_t)bufLen - 1); size_t i = stringValue.length(); while( i > 0 && stringValue[i-1] == '\0' ){ --i; } return stringValue.substr(0,i); } #endif // Run a simple Cadabra server on a local port. 
int main(int argc, char **argv) { #ifndef ENABLE_JUPYTER snoop::log.init("CadabraServer", CADABRA_VERSION_FULL, "log.cadabra.science"); snoop::log.set_sync_immediately(true); #endif #ifdef _WIN32 snoop::log("platform") << "windows" << snoop::flush; #else #ifdef __APPLE__ snoop::log("platform") << "macos" << snoop::flush; #else snoop::log("platform") << "linux" << snoop::flush; #endif #endif int port=0; bool eod=true; if(argc>1) port=atoi(argv[1]); if(argc>2) eod=(atoi(argv[2])==1); Server server; server.run(port, eod); return 0; } #if defined(_WIN32) && defined(NDEBUG) int WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow) { FreeConsole(); return main(__argc, __argv); } #endif ================================================ FILE: client_server/cadabra2html.cc ================================================ #include #include #include "DataCell.hh" int main(int argc, char **argv) { if(argc<2) { std::cerr << "Usage: cadabra2html [--segment] [--strip-code] [cadabra notebook] [html file]\n\n"; std::cerr << "Convert a Cadabra notebook to an HTML segment or standalone HTML file.\n" << " --segment: generate output for the cadabra web site.\n" << " --strip-code: suppress Python cells with 'def' or 'from' lines.\n" << " --hide-input-cells: suppress all input cells (overrides notebook setting).\n" << "If the HTML file name is not given, output goes to standard out.\n"; return -1; } std::string cdb_file, html_file; bool segment_only=false; bool strip_code=false; bool hide_input_cells=false; int n=1; while(n #include #include #include "DataCell.hh" int main(int argc, char **argv) { if(argc<3) { std::cerr << "Usage: cadabra2latex [--segment] [cadabra notebook] [latex file]\n\n"; std::cerr << "Convert a Cadabra v2 notebook to a standalone LaTeX file (plus images).\n"; return -1; } int i=1; bool for_embedding=false; if(argc==4) { if(std::string(argv[1])=="--segment") { for_embedding=true; } ++i; } std::string cdb_file=argv[i], 
latex_file=argv[i+1]; std::ifstream file(cdb_file); std::string content, line; while(std::getline(file, line)) content+=line; cadabra::DTree doc; JSON_deserialise(content, doc); std::size_t dotpos = latex_file.rfind('.'); std::string base = latex_file.substr(0, dotpos); std::string latex = export_as_LaTeX(doc, base, for_embedding); if(for_embedding) { // Ensure all sections are numbered if this will be embedded in a larger // document. latex=std::regex_replace(latex, std::regex(R"(\\section\*\{)"), "\\section\{"); latex=std::regex_replace(latex, std::regex(R"(\\subsection\*\{)"), "\\subsection\{"); latex=std::regex_replace(latex, std::regex(R"(\\LaTeX\{\})"), "LaTeX{}"); } std::ofstream latexfile(latex_file); latexfile << latex; return 0; } ================================================ FILE: client_server/connection.json ================================================ { "transport": "tcp", "ip": "127.0.0.1", "control_port": 50160, "shell_port": 57503, "stdin_port": 52597, "iopub_port": 40885, "hb_port": 42540, "signature_scheme": "hmac-sha256", "key": "a0436f6c-1916-498b-8eb9-e81ab9368e84" } ================================================ FILE: client_server/kernel.json ================================================ { "display_name": "Cadabra", "language" : "python", "argv": [ "cadabra-jupyter-kernel", "-f", "{connection_file}" ] } ================================================ FILE: client_server/notebook.html ================================================ ================================================ FILE: client_server/notebook.tex ================================================ % This is the bit of LaTeX style information that DataCell.cc needs in % order to write notebooks out in standalone LaTeX form. It is very % similar to ../frontend/common/preamble.tex; keep them in sync. 
\documentclass[10pt]{article} \usepackage[scale=.8]{geometry} \usepackage{setspace} \usepackage{fancyhdr} \usepackage{ytableau} \usepackage{listings} \usepackage[fleqn]{amsmath} \usepackage{color} \usepackage{changepage} \usepackage[colorlinks=true, urlcolor=black, plainpages=false, pdfpagelabels]{hyperref} \usepackage{etoolbox} \usepackage{amssymb} \usepackage[parfill]{parskip} \usepackage{graphicx} %\usepackage{tableaux} \def\specialcolon{\mathrel{\mathop{:}}\hspace{-.5em}} \renewcommand{\bar}[1]{\overline{#1}} \newcommand{\algorithm}[2]{{\tt\Large\detokenize{#1}}\\[1ex] {\emph{#2}}\\[1ex] } \newcommand{\property}[2]{{\tt\Large\detokenize{#1}}\\[1ex] {\emph{#2}}\\[1ex] } \newcommand{\algo}[1]{{\tt \detokenize{#1}}} \newcommand{\prop}[1]{{\tt \detokenize{#1}}} \renewcommand{\author}[1]{{\bfseries #1}} \newcommand{\email}[1]{, {\tt #1}} %\makeatletter\def\old@comma{,}\catcode`\,=13 \def,{% %\ifmmode\old@comma\discretionary{}{}{}\else\old@comma\fi}\makeatother \newcommand{\bigO}{{\cal O}} % Math expressions wrapped in \brwrap will get typeset with % round brackets around them, which have the appropriate size. % The expression itself can still be broken over multiple lines. 
\newcommand\brwrap[3]{% \setbox0=\hbox{$#2$} \left#1\vbox to \the\ht0{\hbox to 0pt{}}\right.\kern-.2em \begingroup #2\endgroup\kern-.15em \left.\vbox to \the\ht0{\hbox to 0pt{}}\right#3 } \renewcommand{\arraystretch}{1.2} \tolerance=10000 \relpenalty=10 \binoppenalty=10 \hyphenpenalty=10 \raggedright \lstnewenvironment{python}[1][]{\lstset{language=python, columns=fullflexible, xleftmargin=1em, belowskip=0pt, tabsize=3, commentstyle={}, % otherwise {#} cadabra arguments look ugly breaklines=true, basicstyle=\small\ttfamily\color{blue}, keywordstyle={} }}{} \everymath{\displaystyle} % Page numbers \pagestyle{fancy} \fancyhf{} % clear all header and footer fields \renewcommand{\headrulewidth}{0pt} \renewcommand{\footrulewidth}{0pt} \fancyfoot[LE,RO]{{\small\thepage}} \fancyfoot[LO,RE]{{\tiny\href{https://cadabra.science}{{\tt https://cadabra.science}}}} % \makeatletter\def\old@comma{,}\catcode`\,=13 \def,{% % \ifmmode\old@comma\discretionary{}{}{}\else\old@comma\fi}\makeatother % Ensure that maths broken over multiple lines has a bit of spacing % between lines. \lineskiplimit=0mm \lineskip=1.5ex % % Typesetting Young tableaux, originally in a separate style % % file, now included to avoid path searching problems. % % Some internals for the typesetting macros below; nothing % % user-servicable here; please read on. % % \def\@tabforc#1#2#3{\expandafter\tabf@rc\expandafter#1{#2 \^}{#3}} % \def\tabf@@rc#1#2#3\tabf@@rc#4{\def#1{#2}#4\tabf@rc#1{#3}{#4}} % \long\def\ReturnAfterFi#1\fi{\fi#1} % \def\tabf@rc#1#2#3{% % \def\temp@ty{#2}% % \ifx\@empty\temp@ty % \else % \ReturnAfterFi{% % \tabf@@rc#1#2\tabf@@rc{#3}% % }% % \fi % }% % % % Sorry, some global registers for sizes and keeping track of % % measurements. 
% % \newdimen\ytsize\ytsize=2mm % \newdimen\ytfsize\ytfsize=4mm % \newcount\repcnt % \newdimen\acchspace % \newdimen\accvspace % \newdimen\raiseh % \newdimen\maxw % % \newcommand\phrule[1]{\hbox{\vbox to0pt{\hrule height .2pt width#1\vss}}} % % % Typeset a Young tableau with filled boxes. Takes a single % % argument which is a string of symbols for each row, % % separated by commas. Examples: % % % % \ftableau{abc,de} % % \ftableau{ab{d_2},f{g_3}} % % \newcommand\ftableau[1]{% % \def\ctest{,} % \def\Ktest{\^} % \acchspace=0ex % \accvspace=0ex % \maxw=0ex % \vbox{\hbox{% % \@tabforc\thisel{#1}{% % \ifx\thisel\Ktest{% % \ifnum\maxw=0\maxw=\acchspace\fi% % \raisebox{\accvspace}{\vbox to \ytfsize{\hbox to % 0pt{\vrule height \ytfsize\hss}}}\kern\acchspace\kern-\maxw} % \else\ifx\thisel\ctest % \ifnum\maxw=0\maxw=\acchspace\fi% % \raisebox{\accvspace}{\vbox to \ytfsize{\hbox to 0pt{\vrule height \ytfsize\hss}}}% % \kern\acchspace\acchspace=0ex % \advance\accvspace by -\ytfsize % \else % \setbox3=\hbox{$\thisel$}% % \raiseh=\ytfsize% % \advance\raiseh by -1ex% % \divide\raiseh by 2% % \advance\acchspace by-\ytfsize% % \raisebox{\accvspace}{\vbox to \ytfsize{\hrule\hbox to% % \ytfsize{\vrule height \ytfsize\hskip.5ex% % \raisebox{\raiseh}{\tiny$\thisel$}\hss}\vss\phrule{\ytfsize}}}% % \fi\fi}}}} % % % Typeset a Young tableau with unlabelled boxes. Takes a single % % argument which is a string of numbers, one for the length of % % each row of the tableau. Example: % % % % \tableau{{10}{8}{3}} % % % % typesets a tableau with 10 boxes in the 1st row, 8 in the 2nd % % and 3 in the 3rd. Curly brackets can be omitted if numbers % % are less than 10. 
% % \newcommand\tableau[1]{% % \def\stest{ } % \def\Ktest{\^} % \acchspace=0ex % \accvspace=0ex % \maxw=0ex % \hbox{% % \@tabforc\thisel{#1}{% % \ifx\thisel\Ktest{} % \else % \repcnt=\thisel% % \loop{}% % \advance\acchspace by-\ytsize% % \raisebox{\accvspace}{\vbox to \ytsize{\hrule \hbox to% % \ytsize{\vrule height \ytsize\hss}\vss\phrule{\ytsize}}}% % \advance\repcnt by -1\ifnum\repcnt>1{}\repeat% % \ifnum\maxw=0\maxw=\acchspace\fi% % \raisebox{\accvspace}{\vbox to \ytsize{\hbox to 0pt{\vrule height \ytsize\hss}}}% % \kern\acchspace\acchspace=0ex% % \advance\accvspace by -\ytsize% % \fi}\kern-\maxw}} \ytableausetup{centertableaux} % smalltableaux ================================================ FILE: client_server/popen2.cc ================================================ // http://stackoverflow.com/questions/26852198/getting-the-pid-from-popen #include "popen2.hh" #include #include #define READ 0 #define WRITE 1 FILE * popen2(std::string command, std::string type, int & pid) { pid_t child_pid; int fd[2]; if(pipe(fd)==-1) throw std::logic_error("popen2 failed constructing pipe"); if((child_pid = fork()) == -1) { perror("fork"); exit(1); } /* child process */ if (child_pid == 0) { if (type == "r") { close(fd[READ]); //Close the READ end of the pipe since the child's fd is write-only dup2(fd[WRITE], 1); //Redirect stdout to pipe } else { close(fd[WRITE]); //Close the WRITE end of the pipe since the child's fd is read-only dup2(fd[READ], 0); //Redirect stdin to pipe } execl("/bin/sh", "/bin/sh", "-c", command.c_str(), NULL); exit(0); } else { if (type == "r") { close(fd[WRITE]); //Close the WRITE end of the pipe since parent's fd is read-only } else { close(fd[READ]); //Close the READ end of the pipe since parent's fd is write-only } } pid = child_pid; if (type == "r") { return fdopen(fd[READ], "r"); } return fdopen(fd[WRITE], "w"); } int pclose2(FILE * fp, pid_t pid) { int stat; fclose(fp); while (waitpid(pid, &stat, 0) == -1) { if (errno != EINTR) { stat = -1; 
break; } } return stat; } ================================================ FILE: client_server/popen2.hh ================================================ #pragma once #include #include #include #include #include #include FILE * popen2(std::string command, std::string type, int & pid); int pclose2(FILE * fp, pid_t pid); ================================================ FILE: client_server/regexp_tester.cc ================================================ #include #include #include int main(int argc, char **argv) { if(argc<3) { std::cerr << "Usage: regexp_tester [regex] [string]" << std::endl; return -1; } try { std::regex match(argv[1]); std::smatch res; std::string arg = argv[2]; if(std::regex_search(arg, res, match)) { for(unsigned int i=0; i #include class MyClient : public cadabra::Client { public: MyClient(); void on_connect(); void on_disconnect(); void on_network_error(); void on_progress(); void before_tree_change(ActionBase&); void after_tree_change(ActionBase&); }; class UI { public: void run(); }; MyClient client; UI ui; MyClient::MyClient() : Client(0) { } void MyClient::on_connect() { std::cout << "connected to server" << std::endl; } void MyClient::on_disconnect() { std::cout << "disconnected from server" << std::endl; } void MyClient::on_network_error() { std::cout << "network error" << std::endl; } void MyClient::on_progress() { } void MyClient::before_tree_change(ActionBase& ab) { } void MyClient::after_tree_change(ActionBase& ab) { } void UI::run() { int i; std::cin >> i; cadabra::Client::iterator it=client.dtree().begin(); auto cell = std::make_shared(); auto ac = std::make_shared(cell, it, cadabra::Client::ActionAddCell::Position::child); try { std::cout << "calling perform" << std::endl; client.perform(ac); } catch(std::error_code& ex) { std::cout << ex.message() << std::endl; } std::cout << "perform called" << std::endl; sleep(10); } int main(int, char **) { // client.init(); std::cout << "client connected" << std::endl; try { // Spawn two 
threads. std::thread client_thread(&MyClient::run, std::ref(client)); std::thread ui_thread(&UI::run, ui); // Wait for all threads to finish. client_thread.join(); ui_thread.join(); } catch(std::error_code& ex) { std::cout << ex.message() << std::endl; } } ================================================ FILE: client_server/test_talk_to_server.cc ================================================ #include #include #include // Simple test program to talk to a cadabra server. using websocketpp::lib::placeholders::_1; using websocketpp::lib::placeholders::_2; using websocketpp::lib::bind; typedef websocketpp::client client; typedef websocketpp::config::asio_client::message_type::ptr message_ptr; bool stopit=false; void on_open(client* c, websocketpp::connection_hdl hdl) { // now it is safe to use the connection std::cout << "connection ready" << std::endl; std::string msg; if(stopit) { msg = "{ \"header\": { \"uuid\": \"none\", \"msg_type\": \"execute_interrupt\" }," " \"content\": { \"code\": \"print(42)\n\"} " "}"; } else { msg = "{ \"header\": { \"uuid\": \"none\", \"msg_type\": \"execute_request\" }," " \"content\": { \"code\": \"import time\nprint(42)\ntime.sleep(10)\n\"} " "}"; } // c->send(hdl, "import time\nfor i in range(0,10):\n print('this is python talking '+str(i))\nex=Ex('A_{m n}')\nprint(str(ex))", websocketpp::frame::opcode::text); c->send(hdl, msg, websocketpp::frame::opcode::text); } void on_message(client* c, websocketpp::connection_hdl hdl, message_ptr msg) { client::connection_ptr con = c->get_con_from_hdl(hdl); std::cout << "received message on channel " << con->get_resource() << std::endl; std::cout << msg->get_payload() << std::endl; } int main(int argc, char **argv) { if(argc>1) stopit=true; client c; c.clear_access_channels(websocketpp::log::alevel::all); c.clear_error_channels(websocketpp::log::elevel::all); std::string uri = "ws://localhost:9002"; c.init_asio(); c.set_open_handler(bind(&on_open,&c,::_1)); 
c.set_message_handler(bind(&on_message,&c,::_1,::_2)); websocketpp::lib::error_code ec; client::connection_ptr con = c.get_connection(uri, ec); c.connect(con); std::cout << "connected" << std::endl; // Start the ASIO io_service run loop c.run(); std::cout << "run loop terminated" << std::endl; } ================================================ FILE: client_server/tree.hh ================================================ // STL-like templated tree class. // // Copyright (C) 2001-2014 Kasper Peeters // Distributed under the GNU General Public License version 3. // // Special permission to use tree.hh under the conditions of a // different license can be requested from the author. /** \mainpage tree.hh \author Kasper Peeters \version 3.0 \date 25-Dec-2014 \see http://tree.phi-sci.com/ \see http://tree.phi-sci.com/ChangeLog The tree.hh library for C++ provides an STL-like container class for n-ary trees, templated over the data stored at the nodes. Various types of iterators are provided (post-order, pre-order, and others). Where possible the access methods are compatible with the STL or alternative algorithms are available. */ #ifndef tree_hh_ #define tree_hh_ #include #include #include #include #include #include #include #include /// A node in the tree, combining links to other nodes as well as the actual data. template class tree_node_ { // size: 5*4=20 bytes (on 32 bit arch), can be reduced by 8. public: tree_node_(); tree_node_(const T&); tree_node_ *parent; tree_node_ *first_child, *last_child; tree_node_ *prev_sibling, *next_sibling; T data; }; // __attribute__((packed)); template tree_node_::tree_node_() : parent(0), first_child(0), last_child(0), prev_sibling(0), next_sibling(0) { } template tree_node_::tree_node_(const T& val) : parent(0), first_child(0), last_child(0), prev_sibling(0), next_sibling(0), data(val) { } template > > class tree { protected: typedef tree_node_ tree_node; public: /// Value of the data stored at a node. 
typedef T value_type; class iterator_base; class pre_order_iterator; class post_order_iterator; class sibling_iterator; class leaf_iterator; tree(); // empty constructor tree(const T&); // constructor setting given element as head tree(const iterator_base&); tree(const tree&); // copy constructor tree(tree&&); // move constructor ~tree(); tree& operator=(const tree&); // copy assignment tree& operator=(tree&&); // move assignment /// Base class for iterators, only pointers stored, no traversal logic. #ifdef __SGI_STL_PORT class iterator_base : public stlport::bidirectional_iterator { #else class iterator_base { #endif public: typedef T value_type; typedef T* pointer; typedef T& reference; typedef size_t size_type; typedef ptrdiff_t difference_type; typedef std::bidirectional_iterator_tag iterator_category; iterator_base(); iterator_base(tree_node *); T& operator*() const; T* operator->() const; /// When called, the next increment/decrement skips children of this node. void skip_children(); void skip_children(bool skip); /// Number of children of the node pointed to by the iterator. unsigned int number_of_children() const; sibling_iterator begin() const; sibling_iterator end() const; tree_node *node; protected: bool skip_current_children_; }; /// Depth-first iterator, first accessing the node, then its children. class pre_order_iterator : public iterator_base { public: pre_order_iterator(); pre_order_iterator(tree_node *); pre_order_iterator(const iterator_base&); pre_order_iterator(const sibling_iterator&); bool operator==(const pre_order_iterator&) const; bool operator!=(const pre_order_iterator&) const; pre_order_iterator& operator++(); pre_order_iterator& operator--(); pre_order_iterator operator++(int); pre_order_iterator operator--(int); pre_order_iterator& operator+=(unsigned int); pre_order_iterator& operator-=(unsigned int); pre_order_iterator& next_skip_children(); }; /// Depth-first iterator, first accessing the children, then the node itself. 
class post_order_iterator : public iterator_base { public: post_order_iterator(); post_order_iterator(tree_node *); post_order_iterator(const iterator_base&); post_order_iterator(const sibling_iterator&); bool operator==(const post_order_iterator&) const; bool operator!=(const post_order_iterator&) const; post_order_iterator& operator++(); post_order_iterator& operator--(); post_order_iterator operator++(int); post_order_iterator operator--(int); post_order_iterator& operator+=(unsigned int); post_order_iterator& operator-=(unsigned int); /// Set iterator to the first child as deep as possible down the tree. void descend_all(); }; /// Breadth-first iterator, using a queue class breadth_first_queued_iterator : public iterator_base { public: breadth_first_queued_iterator(); breadth_first_queued_iterator(tree_node *); breadth_first_queued_iterator(const iterator_base&); bool operator==(const breadth_first_queued_iterator&) const; bool operator!=(const breadth_first_queued_iterator&) const; breadth_first_queued_iterator& operator++(); breadth_first_queued_iterator operator++(int); breadth_first_queued_iterator& operator+=(unsigned int); private: std::queue traversal_queue; }; /// The default iterator types throughout the tree class. typedef pre_order_iterator iterator; typedef breadth_first_queued_iterator breadth_first_iterator; /// Iterator which traverses only the nodes at a given depth from the root. 
class fixed_depth_iterator : public iterator_base { public: fixed_depth_iterator(); fixed_depth_iterator(tree_node *); fixed_depth_iterator(const iterator_base&); fixed_depth_iterator(const sibling_iterator&); fixed_depth_iterator(const fixed_depth_iterator&); bool operator==(const fixed_depth_iterator&) const; bool operator!=(const fixed_depth_iterator&) const; fixed_depth_iterator& operator++(); fixed_depth_iterator& operator--(); fixed_depth_iterator operator++(int); fixed_depth_iterator operator--(int); fixed_depth_iterator& operator+=(unsigned int); fixed_depth_iterator& operator-=(unsigned int); tree_node *top_node; }; /// Iterator which traverses only the nodes which are siblings of each other. class sibling_iterator : public iterator_base { public: sibling_iterator(); sibling_iterator(tree_node *); sibling_iterator(const sibling_iterator&); sibling_iterator(const iterator_base&); bool operator==(const sibling_iterator&) const; bool operator!=(const sibling_iterator&) const; sibling_iterator& operator++(); sibling_iterator& operator--(); sibling_iterator operator++(int); sibling_iterator operator--(int); sibling_iterator& operator+=(unsigned int); sibling_iterator& operator-=(unsigned int); tree_node *range_first() const; tree_node *range_last() const; tree_node *parent_; private: void set_parent_(); }; /// Iterator which traverses only the leaves. class leaf_iterator : public iterator_base { public: leaf_iterator(); leaf_iterator(tree_node *, tree_node *top=0); leaf_iterator(const sibling_iterator&); leaf_iterator(const iterator_base&); bool operator==(const leaf_iterator&) const; bool operator!=(const leaf_iterator&) const; leaf_iterator& operator++(); leaf_iterator& operator--(); leaf_iterator operator++(int); leaf_iterator operator--(int); leaf_iterator& operator+=(unsigned int); leaf_iterator& operator-=(unsigned int); private: tree_node *top_node; }; /// Return iterator to the beginning of the tree. 
inline pre_order_iterator begin() const; /// Return iterator to the end of the tree. inline pre_order_iterator end() const; /// Return post-order iterator to the beginning of the tree. post_order_iterator begin_post() const; /// Return post-order end iterator of the tree. post_order_iterator end_post() const; /// Return fixed-depth iterator to the first node at a given depth from the given iterator. fixed_depth_iterator begin_fixed(const iterator_base&, unsigned int) const; /// Return fixed-depth end iterator. fixed_depth_iterator end_fixed(const iterator_base&, unsigned int) const; /// Return breadth-first iterator to the first node at a given depth. breadth_first_queued_iterator begin_breadth_first() const; /// Return breadth-first end iterator. breadth_first_queued_iterator end_breadth_first() const; /// Return sibling iterator to the first child of given node. sibling_iterator begin(const iterator_base&) const; /// Return sibling end iterator for children of given node. sibling_iterator end(const iterator_base&) const; /// Return leaf iterator to the first leaf of the tree. leaf_iterator begin_leaf() const; /// Return leaf end iterator for entire tree. leaf_iterator end_leaf() const; /// Return leaf iterator to the first leaf of the subtree at the given node. leaf_iterator begin_leaf(const iterator_base& top) const; /// Return leaf end iterator for the subtree at the given node. leaf_iterator end_leaf(const iterator_base& top) const; /// Return iterator to the parent of a node. template static iter parent(iter); /// Return iterator to the previous sibling of a node. template static iter previous_sibling(iter); /// Return iterator to the next sibling of a node. template static iter next_sibling(iter); /// Return iterator to the next node at a given depth. template iter next_at_same_depth(iter) const; /// Erase all nodes of the tree. void clear(); /// Erase element at position pointed to by iterator, return incremented iterator. 
template iter erase(iter); /// Erase all children of the node pointed to by iterator. void erase_children(const iterator_base&); /// Insert empty node as last/first child of node pointed to by position. template iter append_child(iter position); template iter prepend_child(iter position); /// Insert node as last/first child of node pointed to by position. template iter append_child(iter position, const T& x); template iter prepend_child(iter position, const T& x); /// Append the node (plus its children) at other_position as last/first child of position. template iter append_child(iter position, iter other_position); template iter prepend_child(iter position, iter other_position); /// Append the nodes in the from-to range (plus their children) as last/first children of position. template iter append_children(iter position, sibling_iterator from, sibling_iterator to); template iter prepend_children(iter position, sibling_iterator from, sibling_iterator to); /// Short-hand to insert topmost node in otherwise empty tree. pre_order_iterator set_head(const T& x); /// Insert node as previous sibling of node pointed to by position. template iter insert(iter position, const T& x); /// Specialisation of previous member. sibling_iterator insert(sibling_iterator position, const T& x); /// Insert node (with children) pointed to by subtree as previous sibling of node pointed to by position. /// Does not change the subtree itself (use move_in or move_in_below for that). template iter insert_subtree(iter position, const iterator_base& subtree); /// Insert node as next sibling of node pointed to by position. template iter insert_after(iter position, const T& x); /// Insert node (with children) pointed to by subtree as next sibling of node pointed to by position. template iter insert_subtree_after(iter position, const iterator_base& subtree); /// Replace node at 'position' with other node (keeping same children); 'position' becomes invalid. 
template iter replace(iter position, const T& x); /// Replace node at 'position' with subtree starting at 'from' (do not erase subtree at 'from'); see above. template iter replace(iter position, const iterator_base& from); /// Replace string of siblings (plus their children) with copy of a new string (with children); see above sibling_iterator replace(sibling_iterator orig_begin, sibling_iterator orig_end, sibling_iterator new_begin, sibling_iterator new_end); /// Move all children of node at 'position' to be siblings, returns position. template iter flatten(iter position); /// Move nodes in range to be children of 'position'. template iter reparent(iter position, sibling_iterator begin, sibling_iterator end); /// Move all child nodes of 'from' to be children of 'position'. template iter reparent(iter position, iter from); /// Replace node with a new node, making the old node a child of the new node. template iter wrap(iter position, const T& x); /// Move 'source' node (plus its children) to become the next sibling of 'target'. template iter move_after(iter target, iter source); /// Move 'source' node (plus its children) to become the previous sibling of 'target'. template iter move_before(iter target, iter source); sibling_iterator move_before(sibling_iterator target, sibling_iterator source); /// Move 'source' node (plus its children) to become the node at 'target' (erasing the node at 'target'). template iter move_ontop(iter target, iter source); /// Extract the subtree starting at the indicated node, removing it from the original tree. tree move_out(iterator); /// Inverse of take_out: inserts the given tree as previous sibling of indicated node by a /// move operation, that is, the given tree becomes empty. Returns iterator to the top node. template iter move_in(iter, tree&); /// As above, but now make the tree a child of the indicated node. 
template iter move_in_below(iter, tree&); /// As above, but now make the tree the nth child of the indicated node (if possible). template iter move_in_as_nth_child(iter, size_t, tree&); /// Merge with other tree, creating new branches and leaves only if they are not already present. void merge(sibling_iterator, sibling_iterator, sibling_iterator, sibling_iterator, bool duplicate_leaves=false); /// Sort (std::sort only moves values of nodes, this one moves children as well). void sort(sibling_iterator from, sibling_iterator to, bool deep=false); template void sort(sibling_iterator from, sibling_iterator to, StrictWeakOrdering comp, bool deep=false); /// Compare two ranges of nodes (compares nodes as well as tree structure). template bool equal(const iter& one, const iter& two, const iter& three) const; template bool equal(const iter& one, const iter& two, const iter& three, BinaryPredicate) const; template bool equal_subtree(const iter& one, const iter& two) const; template bool equal_subtree(const iter& one, const iter& two, BinaryPredicate) const; /// Extract a new tree formed by the range of siblings plus all their children. tree subtree(sibling_iterator from, sibling_iterator to) const; void subtree(tree&, sibling_iterator from, sibling_iterator to) const; /// Exchange the node (plus subtree) with its sibling node (do nothing if no sibling present). void swap(sibling_iterator it); /// Exchange two nodes (plus subtrees) void swap(iterator, iterator); /// Count the total number of nodes. size_t size() const; /// Count the total number of nodes below the indicated node (plus one). size_t size(const iterator_base&) const; /// Check if tree is empty. bool empty() const; /// Compute the depth to the root or to a fixed other iterator. static int depth(const iterator_base&); static int depth(const iterator_base&, const iterator_base&); /// Determine the maximal depth of the tree. An empty tree has max_depth=-1. 
int max_depth() const; /// Determine the maximal depth of the tree with top node at the given position. int max_depth(const iterator_base&) const; /// Count the number of children of node at position. static unsigned int number_of_children(const iterator_base&); /// Count the number of siblings (left and right) of node at iterator. Total nodes at this level is +1. unsigned int number_of_siblings(const iterator_base&) const; /// Determine whether node at position is in the subtrees with root in the range. bool is_in_subtree(const iterator_base& position, const iterator_base& begin, const iterator_base& end) const; /// Determine whether the iterator is an 'end' iterator and thus not actually pointing to a node. bool is_valid(const iterator_base&) const; /// Find the lowest common ancestor of two nodes, that is, the deepest node such that /// both nodes are descendants of it. iterator lowest_common_ancestor(const iterator_base&, const iterator_base &) const; /// Determine the index of a node in the range of siblings to which it belongs. unsigned int index(sibling_iterator it) const; /// Inverse of 'index': return the n-th child of the node at position. static sibling_iterator child(const iterator_base& position, unsigned int); /// Return iterator to the sibling indicated by index sibling_iterator sibling(const iterator_base& position, unsigned int); /// For debugging only: verify internal consistency by inspecting all pointers in the tree /// (which will also trigger a valgrind error in case something got corrupted). void debug_verify_consistency() const; /// Comparator class for iterators (compares pointer values; why doesn't this work automatically?) 
class iterator_base_less { public: bool operator()(const typename tree::iterator_base& one, const typename tree::iterator_base& two) const { return one.node < two.node; } }; tree_node *head, *feet; // head/feet are always dummy; if an iterator points to them it is invalid private: tree_node_allocator alloc_; void head_initialise_(); void copy_(const tree& other); /// Comparator class for two nodes of a tree (used for sorting and searching). template class compare_nodes { public: compare_nodes(StrictWeakOrdering comp) : comp_(comp) {}; bool operator()(const tree_node *a, const tree_node *b) { return comp_(a->data, b->data); } private: StrictWeakOrdering comp_; }; }; //template //class iterator_base_less { // public: // bool operator()(const typename tree::iterator_base& one, // const typename tree::iterator_base& two) const // { // txtout << "operatorclass<" << one.node < two.node << std::endl; // return one.node < two.node; // } //}; // template // bool operator<(const typename tree::iterator& one, // const typename tree::iterator& two) // { // txtout << "operator< " << one.node < two.node << std::endl; // if(one.node < two.node) return true; // return false; // } // // template // bool operator==(const typename tree::iterator& one, // const typename tree::iterator& two) // { // txtout << "operator== " << one.node == two.node << std::endl; // if(one.node == two.node) return true; // return false; // } // // template // bool operator>(const typename tree::iterator_base& one, // const typename tree::iterator_base& two) // { // txtout << "operator> " << one.node < two.node << std::endl; // if(one.node > two.node) return true; // return false; // } // Tree template tree::tree() { head_initialise_(); } template tree::tree(const T& x) { head_initialise_(); set_head(x); } template tree::tree(tree&& x) { head_initialise_(); head->next_sibling=x.head->next_sibling; feet->prev_sibling=x.head->prev_sibling; x.head->next_sibling->prev_sibling=head; 
x.feet->prev_sibling->next_sibling=feet; x.head->next_sibling=x.feet; x.feet->prev_sibling=x.head; } template tree::tree(const iterator_base& other) { head_initialise_(); set_head((*other)); replace(begin(), other); } template tree::~tree() { clear(); alloc_.destroy(head); alloc_.destroy(feet); alloc_.deallocate(head,1); alloc_.deallocate(feet,1); } template void tree::head_initialise_() { head = alloc_.allocate(1,0); // MSVC does not have default second argument feet = alloc_.allocate(1,0); alloc_.construct(head, tree_node_()); alloc_.construct(feet, tree_node_()); head->parent=0; head->first_child=0; head->last_child=0; head->prev_sibling=0; //head; head->next_sibling=feet; //head; feet->parent=0; feet->first_child=0; feet->last_child=0; feet->prev_sibling=head; feet->next_sibling=0; } template tree& tree::operator=(const tree& other) { if(this != &other) copy_(other); return *this; } template tree& tree::operator=(tree&& x) { if(this != &x) { head->next_sibling=x.head->next_sibling; feet->prev_sibling=x.head->prev_sibling; x.head->next_sibling->prev_sibling=head; x.feet->prev_sibling->next_sibling=feet; x.head->next_sibling=x.feet; x.feet->prev_sibling=x.head; } return *this; } template tree::tree(const tree& other) { head_initialise_(); copy_(other); } template void tree::copy_(const tree& other) { clear(); pre_order_iterator it=other.begin(), to=begin(); while(it!=other.end()) { to=insert(to, (*it)); it.skip_children(); ++it; } to=begin(); it=other.begin(); while(it!=other.end()) { to=replace(to, it); to.skip_children(); it.skip_children(); ++to; ++it; } } template void tree::clear() { if(head) while(head->next_sibling!=feet) erase(pre_order_iterator(head->next_sibling)); } template void tree::erase_children(const iterator_base& it) { // std::cout << "erase_children " << it.node << std::endl; if(it.node==0) return; tree_node *cur=it.node->first_child; tree_node *prev=0; while(cur!=0) { prev=cur; cur=cur->next_sibling; erase_children(pre_order_iterator(prev)); 
// kp::destructor(&prev->data); alloc_.destroy(prev); alloc_.deallocate(prev,1); } it.node->first_child=0; it.node->last_child=0; // std::cout << "exit" << std::endl; } template template iter tree::erase(iter it) { tree_node *cur=it.node; assert(cur!=head); iter ret=it; ret.skip_children(); ++ret; erase_children(it); if(cur->prev_sibling==0) { cur->parent->first_child=cur->next_sibling; } else { cur->prev_sibling->next_sibling=cur->next_sibling; } if(cur->next_sibling==0) { cur->parent->last_child=cur->prev_sibling; } else { cur->next_sibling->prev_sibling=cur->prev_sibling; } // kp::destructor(&cur->data); alloc_.destroy(cur); alloc_.deallocate(cur,1); return ret; } template typename tree::pre_order_iterator tree::begin() const { return pre_order_iterator(head->next_sibling); } template typename tree::pre_order_iterator tree::end() const { return pre_order_iterator(feet); } template typename tree::breadth_first_queued_iterator tree::begin_breadth_first() const { return breadth_first_queued_iterator(head->next_sibling); } template typename tree::breadth_first_queued_iterator tree::end_breadth_first() const { return breadth_first_queued_iterator(); } template typename tree::post_order_iterator tree::begin_post() const { tree_node *tmp=head->next_sibling; if(tmp!=feet) { while(tmp->first_child) tmp=tmp->first_child; } return post_order_iterator(tmp); } template typename tree::post_order_iterator tree::end_post() const { return post_order_iterator(feet); } template typename tree::fixed_depth_iterator tree::begin_fixed(const iterator_base& pos, unsigned int dp) const { typename tree::fixed_depth_iterator ret; ret.top_node=pos.node; tree_node *tmp=pos.node; unsigned int curdepth=0; while(curdepthfirst_child==0) { if(tmp->next_sibling==0) { // try to walk up and then right again do { if(tmp==ret.top_node) throw std::range_error("tree: begin_fixed out of range"); tmp=tmp->parent; if(tmp==0) throw std::range_error("tree: begin_fixed out of range"); --curdepth; } 
while(tmp->next_sibling==0); } tmp=tmp->next_sibling; } tmp=tmp->first_child; ++curdepth; } ret.node=tmp; return ret; } template typename tree::fixed_depth_iterator tree::end_fixed(const iterator_base& pos, unsigned int dp) const { assert(1==0); // FIXME: not correct yet: use is_valid() as a temporary workaround tree_node *tmp=pos.node; unsigned int curdepth=1; while(curdepthfirst_child==0) { tmp=tmp->next_sibling; if(tmp==0) throw std::range_error("tree: end_fixed out of range"); } tmp=tmp->first_child; ++curdepth; } return tmp; } template typename tree::sibling_iterator tree::begin(const iterator_base& pos) const { assert(pos.node!=0); if(pos.node->first_child==0) { return end(pos); } return pos.node->first_child; } template typename tree::sibling_iterator tree::end(const iterator_base& pos) const { sibling_iterator ret(0); ret.parent_=pos.node; return ret; } template typename tree::leaf_iterator tree::begin_leaf() const { tree_node *tmp=head->next_sibling; if(tmp!=feet) { while(tmp->first_child) tmp=tmp->first_child; } return leaf_iterator(tmp); } template typename tree::leaf_iterator tree::end_leaf() const { return leaf_iterator(feet); } template typename tree::leaf_iterator tree::begin_leaf(const iterator_base& top) const { tree_node *tmp=top.node; while(tmp->first_child) tmp=tmp->first_child; return leaf_iterator(tmp, top.node); } template typename tree::leaf_iterator tree::end_leaf(const iterator_base& top) const { return leaf_iterator(top.node, top.node); } template template iter tree::parent(iter position) { assert(position.node!=0); return iter(position.node->parent); } template template iter tree::previous_sibling(iter position) { assert(position.node!=0); iter ret(position); ret.node=position.node->prev_sibling; return ret; } template template iter tree::next_sibling(iter position) { assert(position.node!=0); iter ret(position); ret.node=position.node->next_sibling; return ret; } template template iter tree::next_at_same_depth(iter position) const { // 
We make use of a temporary fixed_depth iterator to implement this. typename tree::fixed_depth_iterator tmp(position.node); ++tmp; return iter(tmp); // assert(position.node!=0); // iter ret(position); // // if(position.node->next_sibling) { // ret.node=position.node->next_sibling; // } // else { // int relative_depth=0; // upper: // do { // ret.node=ret.node->parent; // if(ret.node==0) return ret; // --relative_depth; // } while(ret.node->next_sibling==0); // lower: // ret.node=ret.node->next_sibling; // while(ret.node->first_child==0) { // if(ret.node->next_sibling==0) // goto upper; // ret.node=ret.node->next_sibling; // if(ret.node==0) return ret; // } // while(relative_depth<0 && ret.node->first_child!=0) { // ret.node=ret.node->first_child; // ++relative_depth; // } // if(relative_depth<0) { // if(ret.node->next_sibling==0) goto upper; // else goto lower; // } // } // return ret; } template template iter tree::append_child(iter position) { assert(position.node!=head); assert(position.node!=feet); assert(position.node); tree_node *tmp=alloc_.allocate(1,0); alloc_.construct(tmp, tree_node_()); // kp::constructor(&tmp->data); tmp->first_child=0; tmp->last_child=0; tmp->parent=position.node; if(position.node->last_child!=0) { position.node->last_child->next_sibling=tmp; } else { position.node->first_child=tmp; } tmp->prev_sibling=position.node->last_child; position.node->last_child=tmp; tmp->next_sibling=0; return tmp; } template template iter tree::prepend_child(iter position) { assert(position.node!=head); assert(position.node!=feet); assert(position.node); tree_node *tmp=alloc_.allocate(1,0); alloc_.construct(tmp, tree_node_()); // kp::constructor(&tmp->data); tmp->first_child=0; tmp->last_child=0; tmp->parent=position.node; if(position.node->first_child!=0) { position.node->first_child->prev_sibling=tmp; } else { position.node->last_child=tmp; } tmp->next_sibling=position.node->first_child; position.node->prev_child=tmp; tmp->prev_sibling=0; return tmp; } 
template template iter tree::append_child(iter position, const T& x) { // If your program fails here you probably used 'append_child' to add the top // node to an empty tree. From version 1.45 the top element should be added // using 'insert'. See the documentation for further information, and sorry about // the API change. assert(position.node!=head); assert(position.node!=feet); assert(position.node); tree_node* tmp = alloc_.allocate(1,0); alloc_.construct(tmp, x); // kp::constructor(&tmp->data, x); tmp->first_child=0; tmp->last_child=0; tmp->parent=position.node; if(position.node->last_child!=0) { position.node->last_child->next_sibling=tmp; } else { position.node->first_child=tmp; } tmp->prev_sibling=position.node->last_child; position.node->last_child=tmp; tmp->next_sibling=0; return tmp; } template template iter tree::prepend_child(iter position, const T& x) { assert(position.node!=head); assert(position.node!=feet); assert(position.node); tree_node* tmp = alloc_.allocate(1,0); alloc_.construct(tmp, x); // kp::constructor(&tmp->data, x); tmp->first_child=0; tmp->last_child=0; tmp->parent=position.node; if(position.node->first_child!=0) { position.node->first_child->prev_sibling=tmp; } else { position.node->last_child=tmp; } tmp->next_sibling=position.node->first_child; position.node->first_child=tmp; tmp->prev_sibling=0; return tmp; } template template iter tree::append_child(iter position, iter other) { assert(position.node!=head); assert(position.node!=feet); assert(position.node); sibling_iterator aargh=append_child(position, value_type()); return replace(aargh, other); } template template iter tree::prepend_child(iter position, iter other) { assert(position.node!=head); assert(position.node!=feet); assert(position.node); sibling_iterator aargh=prepend_child(position, value_type()); return replace(aargh, other); } template template iter tree::append_children(iter position, sibling_iterator from, sibling_iterator to) { assert(position.node!=head); 
assert(position.node!=feet); assert(position.node); iter ret=from; while(from!=to) { insert_subtree(position.end(), from); ++from; } return ret; } template template iter tree::prepend_children(iter position, sibling_iterator from, sibling_iterator to) { assert(position.node!=head); assert(position.node!=feet); assert(position.node); iter ret=from; while(from!=to) { insert_subtree(position.begin(), from); ++from; } return ret; } template typename tree::pre_order_iterator tree::set_head(const T& x) { assert(head->next_sibling==feet); return insert(iterator(feet), x); } template template iter tree::insert(iter position, const T& x) { if(position.node==0) { position.node=feet; // Backward compatibility: when calling insert on a null node, // insert before the feet. } tree_node* tmp = alloc_.allocate(1,0); alloc_.construct(tmp, x); // kp::constructor(&tmp->data, x); tmp->first_child=0; tmp->last_child=0; tmp->parent=position.node->parent; tmp->next_sibling=position.node; tmp->prev_sibling=position.node->prev_sibling; position.node->prev_sibling=tmp; if(tmp->prev_sibling==0) { if(tmp->parent) // when inserting nodes at the head, there is no parent tmp->parent->first_child=tmp; } else tmp->prev_sibling->next_sibling=tmp; return tmp; } template typename tree::sibling_iterator tree::insert(sibling_iterator position, const T& x) { tree_node* tmp = alloc_.allocate(1,0); alloc_.construct(tmp, x); // kp::constructor(&tmp->data, x); tmp->first_child=0; tmp->last_child=0; tmp->next_sibling=position.node; if(position.node==0) { // iterator points to end of a subtree tmp->parent=position.parent_; tmp->prev_sibling=position.range_last(); tmp->parent->last_child=tmp; } else { tmp->parent=position.node->parent; tmp->prev_sibling=position.node->prev_sibling; position.node->prev_sibling=tmp; } if(tmp->prev_sibling==0) { if(tmp->parent) // when inserting nodes at the head, there is no parent tmp->parent->first_child=tmp; } else tmp->prev_sibling->next_sibling=tmp; return tmp; } template 
template iter tree::insert_after(iter position, const T& x) { tree_node* tmp = alloc_.allocate(1,0); alloc_.construct(tmp, x); // kp::constructor(&tmp->data, x); tmp->first_child=0; tmp->last_child=0; tmp->parent=position.node->parent; tmp->prev_sibling=position.node; tmp->next_sibling=position.node->next_sibling; position.node->next_sibling=tmp; if(tmp->next_sibling==0) { if(tmp->parent) // when inserting nodes at the head, there is no parent tmp->parent->last_child=tmp; } else { tmp->next_sibling->prev_sibling=tmp; } return tmp; } template template iter tree::insert_subtree(iter position, const iterator_base& subtree) { // insert dummy iter it=insert(position, value_type()); // replace dummy with subtree return replace(it, subtree); } template template iter tree::insert_subtree_after(iter position, const iterator_base& subtree) { // insert dummy iter it=insert_after(position, value_type()); // replace dummy with subtree return replace(it, subtree); } // template // template // iter tree::insert_subtree(sibling_iterator position, iter subtree) // { // // insert dummy // iter it(insert(position, value_type())); // // replace dummy with subtree // return replace(it, subtree); // } template template iter tree::replace(iter position, const T& x) { // kp::destructor(&position.node->data); // kp::constructor(&position.node->data, x); position.node->data=x; // alloc_.destroy(position.node); // alloc_.construct(position.node, x); return position; } template template iter tree::replace(iter position, const iterator_base& from) { assert(position.node!=head); tree_node *current_from=from.node; tree_node *start_from=from.node; tree_node *current_to =position.node; // replace the node at position with head of the replacement tree at from // std::cout << "warning!" << position.node << std::endl; erase_children(position); // std::cout << "no warning!" 
<< std::endl; tree_node* tmp = alloc_.allocate(1,0); alloc_.construct(tmp, (*from)); // kp::constructor(&tmp->data, (*from)); tmp->first_child=0; tmp->last_child=0; if(current_to->prev_sibling==0) { if(current_to->parent!=0) current_to->parent->first_child=tmp; } else { current_to->prev_sibling->next_sibling=tmp; } tmp->prev_sibling=current_to->prev_sibling; if(current_to->next_sibling==0) { if(current_to->parent!=0) current_to->parent->last_child=tmp; } else { current_to->next_sibling->prev_sibling=tmp; } tmp->next_sibling=current_to->next_sibling; tmp->parent=current_to->parent; // kp::destructor(¤t_to->data); alloc_.destroy(current_to); alloc_.deallocate(current_to,1); current_to=tmp; // only at this stage can we fix 'last' tree_node *last=from.node->next_sibling; pre_order_iterator toit=tmp; // copy all children do { assert(current_from!=0); if(current_from->first_child != 0) { current_from=current_from->first_child; toit=append_child(toit, current_from->data); } else { while(current_from->next_sibling==0 && current_from!=start_from) { current_from=current_from->parent; toit=parent(toit); assert(current_from!=0); } current_from=current_from->next_sibling; if(current_from!=last) { toit=append_child(parent(toit), current_from->data); } } } while(current_from!=last); return current_to; } template typename tree::sibling_iterator tree::replace( sibling_iterator orig_begin, sibling_iterator orig_end, sibling_iterator new_begin, sibling_iterator new_end) { tree_node *orig_first=orig_begin.node; tree_node *new_first=new_begin.node; tree_node *orig_last=orig_first; while((++orig_begin)!=orig_end) orig_last=orig_last->next_sibling; tree_node *new_last=new_first; while((++new_begin)!=new_end) new_last=new_last->next_sibling; // insert all siblings in new_first..new_last before orig_first bool first=true; pre_order_iterator ret; while(1==1) { pre_order_iterator tt=insert_subtree(pre_order_iterator(orig_first), pre_order_iterator(new_first)); if(first) { ret=tt; 
first=false; } if(new_first==new_last) break; new_first=new_first->next_sibling; } // erase old range of siblings bool last=false; tree_node *next=orig_first; while(1==1) { if(next==orig_last) last=true; next=next->next_sibling; erase((pre_order_iterator)orig_first); if(last) break; orig_first=next; } return ret; } template template iter tree::flatten(iter position) { if(position.node->first_child==0) return position; tree_node *tmp=position.node->first_child; while(tmp) { tmp->parent=position.node->parent; tmp=tmp->next_sibling; } if(position.node->next_sibling) { position.node->last_child->next_sibling=position.node->next_sibling; position.node->next_sibling->prev_sibling=position.node->last_child; } else { position.node->parent->last_child=position.node->last_child; } position.node->next_sibling=position.node->first_child; position.node->next_sibling->prev_sibling=position.node; position.node->first_child=0; position.node->last_child=0; return position; } template template iter tree::reparent(iter position, sibling_iterator begin, sibling_iterator end) { tree_node *first=begin.node; tree_node *last=first; assert(first!=position.node); if(begin==end) return begin; // determine last node while((++begin)!=end) { last=last->next_sibling; } // move subtree if(first->prev_sibling==0) { first->parent->first_child=last->next_sibling; } else { first->prev_sibling->next_sibling=last->next_sibling; } if(last->next_sibling==0) { last->parent->last_child=first->prev_sibling; } else { last->next_sibling->prev_sibling=first->prev_sibling; } if(position.node->first_child==0) { position.node->first_child=first; position.node->last_child=last; first->prev_sibling=0; } else { position.node->last_child->next_sibling=first; first->prev_sibling=position.node->last_child; position.node->last_child=last; } last->next_sibling=0; tree_node *pos=first; for(;;) { pos->parent=position.node; if(pos==last) break; pos=pos->next_sibling; } return first; } template template iter 
tree::reparent(iter position, iter from) { if(from.node->first_child==0) return position; return reparent(position, from.node->first_child, end(from)); } template template iter tree::wrap(iter position, const T& x) { assert(position.node!=0); sibling_iterator fr=position, to=position; ++to; iter ret = insert(position, x); reparent(ret, fr, to); return ret; } template template iter tree::move_after(iter target, iter source) { tree_node *dst=target.node; tree_node *src=source.node; assert(dst); assert(src); if(dst==src) return source; if(dst->next_sibling) if(dst->next_sibling==src) // already in the right spot return source; // take src out of the tree if(src->prev_sibling!=0) src->prev_sibling->next_sibling=src->next_sibling; else src->parent->first_child=src->next_sibling; if(src->next_sibling!=0) src->next_sibling->prev_sibling=src->prev_sibling; else src->parent->last_child=src->prev_sibling; // connect it to the new point if(dst->next_sibling!=0) dst->next_sibling->prev_sibling=src; else dst->parent->last_child=src; src->next_sibling=dst->next_sibling; dst->next_sibling=src; src->prev_sibling=dst; src->parent=dst->parent; return src; } template template iter tree::move_before(iter target, iter source) { tree_node *dst=target.node; tree_node *src=source.node; assert(dst); assert(src); if(dst==src) return source; if(dst->prev_sibling) if(dst->prev_sibling==src) // already in the right spot return source; // take src out of the tree if(src->prev_sibling!=0) src->prev_sibling->next_sibling=src->next_sibling; else src->parent->first_child=src->next_sibling; if(src->next_sibling!=0) src->next_sibling->prev_sibling=src->prev_sibling; else src->parent->last_child=src->prev_sibling; // connect it to the new point if(dst->prev_sibling!=0) dst->prev_sibling->next_sibling=src; else dst->parent->first_child=src; src->prev_sibling=dst->prev_sibling; dst->prev_sibling=src; src->next_sibling=dst; src->parent=dst->parent; return src; } // specialisation for sibling_iterators 
template typename tree::sibling_iterator tree::move_before(sibling_iterator target, sibling_iterator source) { tree_node *dst=target.node; tree_node *src=source.node; tree_node *dst_prev_sibling; if(dst==0) { // must then be an end iterator dst_prev_sibling=target.parent_->last_child; assert(dst_prev_sibling); } else dst_prev_sibling=dst->prev_sibling; assert(src); if(dst==src) return source; if(dst_prev_sibling) if(dst_prev_sibling==src) // already in the right spot return source; // take src out of the tree if(src->prev_sibling!=0) src->prev_sibling->next_sibling=src->next_sibling; else src->parent->first_child=src->next_sibling; if(src->next_sibling!=0) src->next_sibling->prev_sibling=src->prev_sibling; else src->parent->last_child=src->prev_sibling; // connect it to the new point if(dst_prev_sibling!=0) dst_prev_sibling->next_sibling=src; else target.parent_->first_child=src; src->prev_sibling=dst_prev_sibling; if(dst) { dst->prev_sibling=src; src->parent=dst->parent; } src->next_sibling=dst; return src; } template template iter tree::move_ontop(iter target, iter source) { tree_node *dst=target.node; tree_node *src=source.node; assert(dst); assert(src); if(dst==src) return source; // if(dst==src->prev_sibling) { // // } // remember connection points tree_node *b_prev_sibling=dst->prev_sibling; tree_node *b_next_sibling=dst->next_sibling; tree_node *b_parent=dst->parent; // remove target erase(target); // take src out of the tree if(src->prev_sibling!=0) src->prev_sibling->next_sibling=src->next_sibling; else src->parent->first_child=src->next_sibling; if(src->next_sibling!=0) src->next_sibling->prev_sibling=src->prev_sibling; else src->parent->last_child=src->prev_sibling; // connect it to the new point if(b_prev_sibling!=0) b_prev_sibling->next_sibling=src; else b_parent->first_child=src; if(b_next_sibling!=0) b_next_sibling->prev_sibling=src; else b_parent->last_child=src; src->prev_sibling=b_prev_sibling; src->next_sibling=b_next_sibling; 
src->parent=b_parent; return src; } template tree tree::move_out(iterator source) { tree ret; // Move source node into the 'ret' tree. ret.head->next_sibling = source.node; ret.feet->prev_sibling = source.node; source.node->parent=0; // Close the links in the current tree. if(source.node->prev_sibling!=0) source.node->prev_sibling->next_sibling = source.node->next_sibling; if(source.node->next_sibling!=0) source.node->next_sibling->prev_sibling = source.node->prev_sibling; // Fix source prev/next links. source.node->prev_sibling = ret.head; source.node->next_sibling = ret.feet; return ret; // A good compiler will move this, not copy. } template template iter tree::move_in(iter loc, tree& other) { if(other.head->next_sibling==other.feet) return loc; // other tree is empty tree_node *other_first_head = other.head->next_sibling; tree_node *other_last_head = other.feet->prev_sibling; sibling_iterator prev(loc); --prev; prev.node->next_sibling = other_first_head; loc.node->prev_sibling = other_last_head; other_first_head->prev_sibling = prev.node; other_last_head->next_sibling = loc.node; // Adjust parent pointers. tree_node *walk=other_first_head; while(true) { walk->parent=loc.node->parent; if(walk==other_last_head) break; walk=walk->next_sibling; } // Close other tree. 
other.head->next_sibling=other.feet; other.feet->prev_sibling=other.head; return other_first_head; } template template iter tree::move_in_as_nth_child(iter loc, size_t n, tree& other) { if(other.head->next_sibling==other.feet) return loc; // other tree is empty tree_node *other_first_head = other.head->next_sibling; tree_node *other_last_head = other.feet->prev_sibling; if(n==0) { if(loc.node->first_child==0) { loc.node->first_child=other_first_head; loc.node->last_child=other_last_head; other_last_head->next_sibling=0; other_first_head->prev_sibling=0; } else { loc.node->first_child->prev_sibling=other_last_head; other_last_head->next_sibling=loc.node->first_child; loc.node->first_child=other_first_head; other_first_head->prev_sibling=0; } } else { --n; tree_node *walk = loc.node->first_child; while(true) { if(walk==0) throw std::range_error("tree: move_in_as_nth_child position " +std::to_string(n+1) +" out of range; only " +std::to_string(number_of_children(loc)) +" child nodes present"); if(n==0) break; --n; walk = walk->next_sibling; } if(walk->next_sibling==0) loc.node->last_child=other_last_head; else walk->next_sibling->prev_sibling=other_last_head; other_last_head->next_sibling=walk->next_sibling; walk->next_sibling=other_first_head; other_first_head->prev_sibling=walk; } // Adjust parent pointers. tree_node *walk=other_first_head; while(true) { walk->parent=loc.node; if(walk==other_last_head) break; walk=walk->next_sibling; } // Close other tree. 
other.head->next_sibling=other.feet; other.feet->prev_sibling=other.head; return other_first_head; } template void tree::merge(sibling_iterator to1, sibling_iterator to2, sibling_iterator from1, sibling_iterator from2, bool duplicate_leaves) { sibling_iterator fnd; while(from1!=from2) { if((fnd=std::find(to1, to2, (*from1))) != to2) { // element found if(from1.begin()==from1.end()) { // full depth reached if(duplicate_leaves) append_child(parent(to1), (*from1)); } else { // descend further merge(fnd.begin(), fnd.end(), from1.begin(), from1.end(), duplicate_leaves); } } else { // element missing insert_subtree(to2, from1); } ++from1; } } template void tree::sort(sibling_iterator from, sibling_iterator to, bool deep) { std::less comp; sort(from, to, comp, deep); } template template void tree::sort(sibling_iterator from, sibling_iterator to, StrictWeakOrdering comp, bool deep) { if(from==to) return; // make list of sorted nodes // CHECK: if multiset stores equivalent nodes in the order in which they // are inserted, then this routine should be called 'stable_sort'. std::multiset > nodes(comp); sibling_iterator it=from, it2=to; while(it != to) { nodes.insert(it.node); ++it; } // reassemble --it2; // prev and next are the nodes before and after the sorted range tree_node *prev=from.node->prev_sibling; tree_node *next=it2.node->next_sibling; typename std::multiset >::iterator nit=nodes.begin(), eit=nodes.end(); if(prev==0) { if((*nit)->parent!=0) // to catch "sorting the head" situations, when there is no parent (*nit)->parent->first_child=(*nit); } else prev->next_sibling=(*nit); --eit; while(nit!=eit) { (*nit)->prev_sibling=prev; if(prev) prev->next_sibling=(*nit); prev=(*nit); ++nit; } // prev now points to the last-but-one node in the sorted range if(prev) prev->next_sibling=(*eit); // eit points to the last node in the sorted range. 
(*eit)->next_sibling=next; (*eit)->prev_sibling=prev; // missed in the loop above if(next==0) { if((*eit)->parent!=0) // to catch "sorting the head" situations, when there is no parent (*eit)->parent->last_child=(*eit); } else next->prev_sibling=(*eit); if(deep) { // sort the children of each node too sibling_iterator bcs(*nodes.begin()); sibling_iterator ecs(*eit); ++ecs; while(bcs!=ecs) { sort(begin(bcs), end(bcs), comp, deep); ++bcs; } } } template template bool tree::equal(const iter& one_, const iter& two, const iter& three_) const { std::equal_to comp; return equal(one_, two, three_, comp); } template template bool tree::equal_subtree(const iter& one_, const iter& two_) const { std::equal_to comp; return equal_subtree(one_, two_, comp); } template template bool tree::equal(const iter& one_, const iter& two, const iter& three_, BinaryPredicate fun) const { pre_order_iterator one(one_), three(three_); // if(one==two && is_valid(three) && three.number_of_children()!=0) // return false; while(one!=two && is_valid(three)) { if(!fun(*one,*three)) return false; if(one.number_of_children()!=three.number_of_children()) return false; ++one; ++three; } return true; } template template bool tree::equal_subtree(const iter& one_, const iter& two_, BinaryPredicate fun) const { pre_order_iterator one(one_), two(two_); if(!fun(*one,*two)) return false; if(number_of_children(one)!=number_of_children(two)) return false; return equal(begin(one),end(one),begin(two),fun); } template tree tree::subtree(sibling_iterator from, sibling_iterator to) const { tree tmp; tmp.set_head(value_type()); tmp.replace(tmp.begin(), tmp.end(), from, to); return tmp; } template void tree::subtree(tree& tmp, sibling_iterator from, sibling_iterator to) const { tmp.set_head(value_type()); tmp.replace(tmp.begin(), tmp.end(), from, to); } template size_t tree::size() const { size_t i=0; pre_order_iterator it=begin(), eit=end(); while(it!=eit) { ++i; ++it; } return i; } template size_t tree::size(const 
iterator_base& top) const { size_t i=0; pre_order_iterator it=top, eit=top; eit.skip_children(); ++eit; while(it!=eit) { ++i; ++it; } return i; } template bool tree::empty() const { pre_order_iterator it=begin(), eit=end(); return (it==eit); } template int tree::depth(const iterator_base& it) { tree_node* pos=it.node; assert(pos!=0); int ret=0; while(pos->parent!=0) { pos=pos->parent; ++ret; } return ret; } template int tree::depth(const iterator_base& it, const iterator_base& root) { tree_node* pos=it.node; assert(pos!=0); int ret=0; while(pos->parent!=0 && pos!=root.node) { pos=pos->parent; ++ret; } return ret; } template int tree::max_depth() const { int maxd=-1; for(tree_node *it = head->next_sibling; it!=feet; it=it->next_sibling) maxd=std::max(maxd, max_depth(it)); return maxd; } template int tree::max_depth(const iterator_base& pos) const { tree_node *tmp=pos.node; if(tmp==0 || tmp==head || tmp==feet) return -1; int curdepth=0, maxdepth=0; while(true) { // try to walk the bottom of the tree while(tmp->first_child==0) { if(tmp==pos.node) return maxdepth; if(tmp->next_sibling==0) { // try to walk up and then right again do { tmp=tmp->parent; if(tmp==0) return maxdepth; --curdepth; } while(tmp->next_sibling==0); } if(tmp==pos.node) return maxdepth; tmp=tmp->next_sibling; } tmp=tmp->first_child; ++curdepth; maxdepth=std::max(curdepth, maxdepth); } } template unsigned int tree::number_of_children(const iterator_base& it) { tree_node *pos=it.node->first_child; if(pos==0) return 0; unsigned int ret=1; // while(pos!=it.node->last_child) { // ++ret; // pos=pos->next_sibling; // } while((pos=pos->next_sibling)) ++ret; return ret; } template unsigned int tree::number_of_siblings(const iterator_base& it) const { tree_node *pos=it.node; unsigned int ret=0; // count forward while(pos->next_sibling && pos->next_sibling!=head && pos->next_sibling!=feet) { ++ret; pos=pos->next_sibling; } // count backward pos=it.node; while(pos->prev_sibling && pos->prev_sibling!=head && 
pos->prev_sibling!=feet) { ++ret; pos=pos->prev_sibling; } return ret; } template void tree::swap(sibling_iterator it) { tree_node *nxt=it.node->next_sibling; if(nxt) { if(it.node->prev_sibling) it.node->prev_sibling->next_sibling=nxt; else it.node->parent->first_child=nxt; nxt->prev_sibling=it.node->prev_sibling; tree_node *nxtnxt=nxt->next_sibling; if(nxtnxt) nxtnxt->prev_sibling=it.node; else it.node->parent->last_child=it.node; nxt->next_sibling=it.node; it.node->prev_sibling=nxt; it.node->next_sibling=nxtnxt; } } template void tree::swap(iterator one, iterator two) { // if one and two are adjacent siblings, use the sibling swap if(one.node->next_sibling==two.node) swap(one); else if(two.node->next_sibling==one.node) swap(two); else { tree_node *nxt1=one.node->next_sibling; tree_node *nxt2=two.node->next_sibling; tree_node *pre1=one.node->prev_sibling; tree_node *pre2=two.node->prev_sibling; tree_node *par1=one.node->parent; tree_node *par2=two.node->parent; // reconnect one.node->parent=par2; one.node->next_sibling=nxt2; if(nxt2) nxt2->prev_sibling=one.node; else par2->last_child=one.node; one.node->prev_sibling=pre2; if(pre2) pre2->next_sibling=one.node; else par2->first_child=one.node; two.node->parent=par1; two.node->next_sibling=nxt1; if(nxt1) nxt1->prev_sibling=two.node; else par1->last_child=two.node; two.node->prev_sibling=pre1; if(pre1) pre1->next_sibling=two.node; else par1->first_child=two.node; } } // template // tree::iterator tree::find_subtree( // sibling_iterator subfrom, sibling_iterator subto, iterator from, iterator to, // BinaryPredicate fun) const // { // assert(1==0); // this routine is not finished yet. // while(from!=to) { // if(fun(*subfrom, *from)) { // // } // } // return to; // } template bool tree::is_in_subtree(const iterator_base& it, const iterator_base& begin, const iterator_base& end) const { // FIXME: this should be optimised. 
pre_order_iterator tmp=begin; while(tmp!=end) { if(tmp==it) return true; ++tmp; } return false; } template bool tree::is_valid(const iterator_base& it) const { if(it.node==0 || it.node==feet || it.node==head) return false; else return true; } template typename tree::iterator tree::lowest_common_ancestor( const iterator_base& one, const iterator_base& two) const { std::set parents; // Walk up from 'one' storing all parents. iterator walk=one; do { walk=parent(walk); parents.insert(walk); } while( is_valid(parent(walk)) ); // Walk up from 'two' until we encounter a node in parents. walk=two; do { walk=parent(walk); if(parents.find(walk) != parents.end()) break; } while( is_valid(parent(walk)) ); return walk; } template unsigned int tree::index(sibling_iterator it) const { unsigned int ind=0; if(it.node->parent==0) { while(it.node->prev_sibling!=head) { it.node=it.node->prev_sibling; ++ind; } } else { while(it.node->prev_sibling!=0) { it.node=it.node->prev_sibling; ++ind; } } return ind; } template typename tree::sibling_iterator tree::sibling(const iterator_base& it, unsigned int num) { tree_node *tmp; if(it.node->parent==0) { tmp=head->next_sibling; while(num) { tmp = tmp->next_sibling; --num; } } else { tmp=it.node->parent->first_child; while(num) { assert(tmp!=0); tmp = tmp->next_sibling; --num; } } return tmp; } template void tree::debug_verify_consistency() const { iterator it=begin(); while(it!=end()) { if(it.node->parent!=0) { if(it.node->prev_sibling==0) assert(it.node->parent->first_child==it.node); else assert(it.node->prev_sibling->next_sibling==it.node); if(it.node->next_sibling==0) assert(it.node->parent->last_child==it.node); else assert(it.node->next_sibling->prev_sibling==it.node); } ++it; } } template typename tree::sibling_iterator tree::child(const iterator_base& it, unsigned int num) { tree_node *tmp=it.node->first_child; while(num--) { assert(tmp!=0); tmp=tmp->next_sibling; } return tmp; } // Iterator base template 
tree::iterator_base::iterator_base() : node(0), skip_current_children_(false) { } template tree::iterator_base::iterator_base(tree_node *tn) : node(tn), skip_current_children_(false) { } template T& tree::iterator_base::operator*() const { return node->data; } template T* tree::iterator_base::operator->() const { return &(node->data); } template bool tree::post_order_iterator::operator!=(const post_order_iterator& other) const { if(other.node!=this->node) return true; else return false; } template bool tree::post_order_iterator::operator==(const post_order_iterator& other) const { if(other.node==this->node) return true; else return false; } template bool tree::pre_order_iterator::operator!=(const pre_order_iterator& other) const { if(other.node!=this->node) return true; else return false; } template bool tree::pre_order_iterator::operator==(const pre_order_iterator& other) const { if(other.node==this->node) return true; else return false; } template bool tree::sibling_iterator::operator!=(const sibling_iterator& other) const { if(other.node!=this->node) return true; else return false; } template bool tree::sibling_iterator::operator==(const sibling_iterator& other) const { if(other.node==this->node) return true; else return false; } template bool tree::leaf_iterator::operator!=(const leaf_iterator& other) const { if(other.node!=this->node) return true; else return false; } template bool tree::leaf_iterator::operator==(const leaf_iterator& other) const { if(other.node==this->node && other.top_node==this->top_node) return true; else return false; } template typename tree::sibling_iterator tree::iterator_base::begin() const { if(node->first_child==0) return end(); sibling_iterator ret(node->first_child); ret.parent_=this->node; return ret; } template typename tree::sibling_iterator tree::iterator_base::end() const { sibling_iterator ret(0); ret.parent_=node; return ret; } template void tree::iterator_base::skip_children() { skip_current_children_=true; } template void 
tree::iterator_base::skip_children(bool skip) { skip_current_children_=skip; } template unsigned int tree::iterator_base::number_of_children() const { tree_node *pos=node->first_child; if(pos==0) return 0; unsigned int ret=1; while(pos!=node->last_child) { ++ret; pos=pos->next_sibling; } return ret; } // Pre-order iterator template tree::pre_order_iterator::pre_order_iterator() : iterator_base(0) { } template tree::pre_order_iterator::pre_order_iterator(tree_node *tn) : iterator_base(tn) { } template tree::pre_order_iterator::pre_order_iterator(const iterator_base &other) : iterator_base(other.node) { } template tree::pre_order_iterator::pre_order_iterator(const sibling_iterator& other) : iterator_base(other.node) { if(this->node==0) { if(other.range_last()!=0) this->node=other.range_last(); else this->node=other.parent_; this->skip_children(); ++(*this); } } template typename tree::pre_order_iterator& tree::pre_order_iterator::operator++() { assert(this->node!=0); if(!this->skip_current_children_ && this->node->first_child != 0) { this->node=this->node->first_child; } else { this->skip_current_children_=false; while(this->node->next_sibling==0) { this->node=this->node->parent; if(this->node==0) return *this; } this->node=this->node->next_sibling; } return *this; } template typename tree::pre_order_iterator& tree::pre_order_iterator::operator--() { assert(this->node!=0); if(this->node->prev_sibling) { this->node=this->node->prev_sibling; while(this->node->last_child) this->node=this->node->last_child; } else { this->node=this->node->parent; if(this->node==0) return *this; } return *this; } template typename tree::pre_order_iterator tree::pre_order_iterator::operator++(int) { pre_order_iterator copy = *this; ++(*this); return copy; } template typename tree::pre_order_iterator& tree::pre_order_iterator::next_skip_children() { (*this).skip_children(); (*this)++; return *this; } template typename tree::pre_order_iterator tree::pre_order_iterator::operator--(int) { 
pre_order_iterator copy = *this; --(*this); return copy; } template typename tree::pre_order_iterator& tree::pre_order_iterator::operator+=(unsigned int num) { while(num>0) { ++(*this); --num; } return (*this); } template typename tree::pre_order_iterator& tree::pre_order_iterator::operator-=(unsigned int num) { while(num>0) { --(*this); --num; } return (*this); } // Post-order iterator template tree::post_order_iterator::post_order_iterator() : iterator_base(0) { } template tree::post_order_iterator::post_order_iterator(tree_node *tn) : iterator_base(tn) { } template tree::post_order_iterator::post_order_iterator(const iterator_base &other) : iterator_base(other.node) { } template tree::post_order_iterator::post_order_iterator(const sibling_iterator& other) : iterator_base(other.node) { if(this->node==0) { if(other.range_last()!=0) this->node=other.range_last(); else this->node=other.parent_; this->skip_children(); ++(*this); } } template typename tree::post_order_iterator& tree::post_order_iterator::operator++() { assert(this->node!=0); if(this->node->next_sibling==0) { this->node=this->node->parent; this->skip_current_children_=false; } else { this->node=this->node->next_sibling; if(this->skip_current_children_) { this->skip_current_children_=false; } else { while(this->node->first_child) this->node=this->node->first_child; } } return *this; } template typename tree::post_order_iterator& tree::post_order_iterator::operator--() { assert(this->node!=0); if(this->skip_current_children_ || this->node->last_child==0) { this->skip_current_children_=false; while(this->node->prev_sibling==0) this->node=this->node->parent; this->node=this->node->prev_sibling; } else { this->node=this->node->last_child; } return *this; } template typename tree::post_order_iterator tree::post_order_iterator::operator++(int) { post_order_iterator copy = *this; ++(*this); return copy; } template typename tree::post_order_iterator tree::post_order_iterator::operator--(int) { 
post_order_iterator copy = *this; --(*this); return copy; } template typename tree::post_order_iterator& tree::post_order_iterator::operator+=(unsigned int num) { while(num>0) { ++(*this); --num; } return (*this); } template typename tree::post_order_iterator& tree::post_order_iterator::operator-=(unsigned int num) { while(num>0) { --(*this); --num; } return (*this); } template void tree::post_order_iterator::descend_all() { assert(this->node!=0); while(this->node->first_child) this->node=this->node->first_child; } // Breadth-first iterator template tree::breadth_first_queued_iterator::breadth_first_queued_iterator() : iterator_base() { } template tree::breadth_first_queued_iterator::breadth_first_queued_iterator(tree_node *tn) : iterator_base(tn) { traversal_queue.push(tn); } template tree::breadth_first_queued_iterator::breadth_first_queued_iterator(const iterator_base& other) : iterator_base(other.node) { traversal_queue.push(other.node); } template bool tree::breadth_first_queued_iterator::operator!=(const breadth_first_queued_iterator& other) const { if(other.node!=this->node) return true; else return false; } template bool tree::breadth_first_queued_iterator::operator==(const breadth_first_queued_iterator& other) const { if(other.node==this->node) return true; else return false; } template typename tree::breadth_first_queued_iterator& tree::breadth_first_queued_iterator::operator++() { assert(this->node!=0); // Add child nodes and pop current node sibling_iterator sib=this->begin(); while(sib!=this->end()) { traversal_queue.push(sib.node); ++sib; } traversal_queue.pop(); if(traversal_queue.size()>0) this->node=traversal_queue.front(); else this->node=0; return (*this); } template typename tree::breadth_first_queued_iterator tree::breadth_first_queued_iterator::operator++(int) { breadth_first_queued_iterator copy = *this; ++(*this); return copy; } template typename tree::breadth_first_queued_iterator& tree::breadth_first_queued_iterator::operator+=(unsigned 
int num) { while(num>0) { ++(*this); --num; } return (*this); } // Fixed depth iterator template tree::fixed_depth_iterator::fixed_depth_iterator() : iterator_base() { } template tree::fixed_depth_iterator::fixed_depth_iterator(tree_node *tn) : iterator_base(tn), top_node(0) { } template tree::fixed_depth_iterator::fixed_depth_iterator(const iterator_base& other) : iterator_base(other.node), top_node(0) { } template tree::fixed_depth_iterator::fixed_depth_iterator(const sibling_iterator& other) : iterator_base(other.node), top_node(0) { } template tree::fixed_depth_iterator::fixed_depth_iterator(const fixed_depth_iterator& other) : iterator_base(other.node), top_node(other.top_node) { } template bool tree::fixed_depth_iterator::operator==(const fixed_depth_iterator& other) const { if(other.node==this->node && other.top_node==top_node) return true; else return false; } template bool tree::fixed_depth_iterator::operator!=(const fixed_depth_iterator& other) const { if(other.node!=this->node || other.top_node!=top_node) return true; else return false; } template typename tree::fixed_depth_iterator& tree::fixed_depth_iterator::operator++() { assert(this->node!=0); if(this->node->next_sibling) { this->node=this->node->next_sibling; } else { int relative_depth=0; upper: do { if(this->node==this->top_node) { this->node=0; // FIXME: return a proper fixed_depth end iterator once implemented return *this; } this->node=this->node->parent; if(this->node==0) return *this; --relative_depth; } while(this->node->next_sibling==0); lower: this->node=this->node->next_sibling; while(this->node->first_child==0) { if(this->node->next_sibling==0) goto upper; this->node=this->node->next_sibling; if(this->node==0) return *this; } while(relative_depth<0 && this->node->first_child!=0) { this->node=this->node->first_child; ++relative_depth; } if(relative_depth<0) { if(this->node->next_sibling==0) goto upper; else goto lower; } } return *this; } template typename tree::fixed_depth_iterator& 
tree::fixed_depth_iterator::operator--() { assert(this->node!=0); if(this->node->prev_sibling) { this->node=this->node->prev_sibling; } else { int relative_depth=0; upper: do { if(this->node==this->top_node) { this->node=0; return *this; } this->node=this->node->parent; if(this->node==0) return *this; --relative_depth; } while(this->node->prev_sibling==0); lower: this->node=this->node->prev_sibling; while(this->node->last_child==0) { if(this->node->prev_sibling==0) goto upper; this->node=this->node->prev_sibling; if(this->node==0) return *this; } while(relative_depth<0 && this->node->last_child!=0) { this->node=this->node->last_child; ++relative_depth; } if(relative_depth<0) { if(this->node->prev_sibling==0) goto upper; else goto lower; } } return *this; // // // assert(this->node!=0); // if(this->node->prev_sibling!=0) { // this->node=this->node->prev_sibling; // assert(this->node!=0); // if(this->node->parent==0 && this->node->prev_sibling==0) // head element // this->node=0; // } // else { // tree_node *par=this->node->parent; // do { // par=par->prev_sibling; // if(par==0) { // FIXME: need to keep track of this! 
// this->node=0; // return *this; // } // } while(par->last_child==0); // this->node=par->last_child; // } // return *this; } template typename tree::fixed_depth_iterator tree::fixed_depth_iterator::operator++(int) { fixed_depth_iterator copy = *this; ++(*this); return copy; } template typename tree::fixed_depth_iterator tree::fixed_depth_iterator::operator--(int) { fixed_depth_iterator copy = *this; --(*this); return copy; } template typename tree::fixed_depth_iterator& tree::fixed_depth_iterator::operator-=(unsigned int num) { while(num>0) { --(*this); --(num); } return (*this); } template typename tree::fixed_depth_iterator& tree::fixed_depth_iterator::operator+=(unsigned int num) { while(num>0) { ++(*this); --(num); } return *this; } // Sibling iterator template tree::sibling_iterator::sibling_iterator() : iterator_base() { set_parent_(); } template tree::sibling_iterator::sibling_iterator(tree_node *tn) : iterator_base(tn) { set_parent_(); } template tree::sibling_iterator::sibling_iterator(const iterator_base& other) : iterator_base(other.node) { set_parent_(); } template tree::sibling_iterator::sibling_iterator(const sibling_iterator& other) : iterator_base(other), parent_(other.parent_) { } template void tree::sibling_iterator::set_parent_() { parent_=0; if(this->node==0) return; if(this->node->parent!=0) parent_=this->node->parent; } template typename tree::sibling_iterator& tree::sibling_iterator::operator++() { if(this->node) this->node=this->node->next_sibling; return *this; } template typename tree::sibling_iterator& tree::sibling_iterator::operator--() { if(this->node) this->node=this->node->prev_sibling; else { assert(parent_); this->node=parent_->last_child; } return *this; } template typename tree::sibling_iterator tree::sibling_iterator::operator++(int) { sibling_iterator copy = *this; ++(*this); return copy; } template typename tree::sibling_iterator tree::sibling_iterator::operator--(int) { sibling_iterator copy = *this; --(*this); return copy; 
} template typename tree::sibling_iterator& tree::sibling_iterator::operator+=(unsigned int num) { while(num>0) { ++(*this); --num; } return (*this); } template typename tree::sibling_iterator& tree::sibling_iterator::operator-=(unsigned int num) { while(num>0) { --(*this); --num; } return (*this); } template typename tree::tree_node *tree::sibling_iterator::range_first() const { tree_node *tmp=parent_->first_child; return tmp; } template typename tree::tree_node *tree::sibling_iterator::range_last() const { return parent_->last_child; } // Leaf iterator template tree::leaf_iterator::leaf_iterator() : iterator_base(0), top_node(0) { } template tree::leaf_iterator::leaf_iterator(tree_node *tn, tree_node *top) : iterator_base(tn), top_node(top) { } template tree::leaf_iterator::leaf_iterator(const iterator_base &other) : iterator_base(other.node), top_node(0) { } template tree::leaf_iterator::leaf_iterator(const sibling_iterator& other) : iterator_base(other.node), top_node(0) { if(this->node==0) { if(other.range_last()!=0) this->node=other.range_last(); else this->node=other.parent_; ++(*this); } } template typename tree::leaf_iterator& tree::leaf_iterator::operator++() { assert(this->node!=0); if(this->node->first_child!=0) { // current node is no longer leaf (children got added) while(this->node->first_child) this->node=this->node->first_child; } else { while(this->node->next_sibling==0) { if (this->node->parent==0) return *this; this->node=this->node->parent; if (top_node != 0 && this->node==top_node) return *this; } this->node=this->node->next_sibling; while(this->node->first_child) this->node=this->node->first_child; } return *this; } template typename tree::leaf_iterator& tree::leaf_iterator::operator--() { assert(this->node!=0); while (this->node->prev_sibling==0) { if (this->node->parent==0) return *this; this->node=this->node->parent; if (top_node !=0 && this->node==top_node) return *this; } this->node=this->node->prev_sibling; while(this->node->last_child) 
this->node=this->node->last_child; return *this; } template typename tree::leaf_iterator tree::leaf_iterator::operator++(int) { leaf_iterator copy = *this; ++(*this); return copy; } template typename tree::leaf_iterator tree::leaf_iterator::operator--(int) { leaf_iterator copy = *this; --(*this); return copy; } template typename tree::leaf_iterator& tree::leaf_iterator::operator+=(unsigned int num) { while(num>0) { ++(*this); --num; } return (*this); } template typename tree::leaf_iterator& tree::leaf_iterator::operator-=(unsigned int num) { while(num>0) { --(*this); --num; } return (*this); } #endif // Local variables: // default-tab-width: 3 // End: ================================================ FILE: client_server/websocket_client.cc ================================================ #include "websocket_client.hh" #include websocket_client::websocket_client() : ssl_ctx_(boost::asio::ssl::context::sslv23_client) // tlsv12_client) , resolver_(ioc_) , is_ssl_(false) { ssl_ctx_.set_default_verify_paths(); #ifdef __APPLE__ #include #if TARGET_OS_IPHONE ssl_ctx_.set_verify_mode(boost::asio::ssl::verify_none); #else // on MacOS or related, include the system certificate chain. 
ssl_ctx_.set_verify_mode(boost::asio::ssl::verify_none); // ssl_ctx_.load_verify_file("/etc/ssl/cert.pem"); // ssl_ctx_.set_verify_mode(boost::asio::ssl::verify_peer); // Configure SSL context to be more permissive #endif // ssl_ctx_.set_options(boost::asio::ssl::context::default_workarounds | // boost::asio::ssl::context::no_sslv2 | // boost::asio::ssl::context::no_sslv3); #else ssl_ctx_.set_verify_mode(boost::asio::ssl::verify_peer); #endif // ssl_ctx_.set_verify_callback([](bool preverified, ssl::verify_context& ctx) { // // Log verification details for debugging // char subject_name[256]; // X509* cert = X509_STORE_CTX_get_current_cert(ctx.native_handle()); // X509_NAME_oneline(X509_get_subject_name(cert), subject_name, 256); // std::cerr << "websocket_client::verify_callback: " << subject_name << "\n"; // return preverified; // }); } websocket_client::~websocket_client() { boost::beast::error_code ec; // ignored errors if (ws_stream_) { ws_stream_->close(boost::beast::websocket::close_code::normal, ec); boost::beast::get_lowest_layer(*ws_stream_).shutdown(boost::asio::ip::tcp::socket::shutdown_both, ec); } if (wss_stream_) { wss_stream_->close(boost::beast::websocket::close_code::normal, ec); wss_stream_->next_layer().shutdown(ec); boost::beast::get_lowest_layer(*wss_stream_).shutdown(boost::asio::ip::tcp::socket::shutdown_both, ec); } ioc_.stop(); } void websocket_client::set_message_handler(message_handler h) { message_handler_ = std::move(h); } void websocket_client::set_connect_handler(connect_handler h) { connect_handler_ = std::move(h); } void websocket_client::set_close_handler(close_handler h) { close_handler_ = std::move(h); } void websocket_client::set_fail_handler(fail_handler h) { fail_handler_ = std::move(h); } void websocket_client::connect(const std::string& uri_string) { // Parse URI (basic) Uri uri(uri_string); is_ssl_ = uri.protocol=="wss"; host_ = uri.host; port_ = uri.port.empty() ? (is_ssl_ ? 
"443" : "80") : uri.port;
   path_ = uri.path;
   // Create appropriate stream
   if (is_ssl_) {
      // NOTE(review): template arguments were stripped by the extraction here
      // ("std::make_unique>>"); tokens kept exactly as found.
      wss_stream_ = std::make_unique>>(ioc_, ssl_ctx_);
      wss_stream_->binary(false);          // Set to text mode
      wss_stream_->auto_fragment(false);   // Don't fragment messages
      wss_stream_->read_message_max(64 * 1024 * 1024); // 64MB max message size
      // ssl_ctx_.set_verify_callback(
      //    [host](bool preverified, ssl::verify_context& ctx) {
      //       // You can add debug logging here to see verification attempts
      //       std::cerr << "Verifying certificate: " << preverified << std::endl;
      //       return preverified;
      //       });
      // wss_stream_->next_layer().set_server_hostname(host_);
      wss_stream_->set_option(boost::beast::websocket::stream_base::decorator(
         [](boost::beast::websocket::request_type& req) {
            req.version(11);
            req.set(boost::beast::http::field::user_agent, "WebSocket-Client/1.0");
            // Log all headers
            // for (auto const& field : req) {
            //    std::cerr << field.name_string() << ": "
            //              << field.value() << "\n";
            //    }
            }));
      }
   else {
      ws_stream_ = std::make_unique>(ioc_);
      ws_stream_->binary(false);          // Set to text mode
      ws_stream_->auto_fragment(false);   // Don't fragment messages
      ws_stream_->read_message_max(64 * 1024 * 1024); // 64MB max message size
      }
   // Start the connection process
   resolver_.async_resolve(host_, port_,
      [this](const boost::beast::error_code& ec, boost::asio::ip::tcp::resolver::results_type results) {
         on_resolve(ec, results);
         });
   }

// DNS resolution completed: connect the underlying TCP socket. On the SSL
// path the extra next_layer() reaches through the ssl stream to the socket.
void websocket_client::on_resolve(const boost::beast::error_code& ec, boost::asio::ip::tcp::resolver::results_type results)
   {
   if (ec) return fail(ec);
   if (is_ssl_) {
      boost::asio::async_connect(
         wss_stream_->next_layer().next_layer(), results,
         [this](const boost::beast::error_code& ec, const boost::asio::ip::tcp::endpoint& /*endpoint*/) {
            on_connect(ec);
            });
      }
   else {
      boost::asio::async_connect(
         ws_stream_->next_layer(), results,
         [this](const boost::beast::error_code& ec, const boost::asio::ip::tcp::endpoint& /*endpoint*/) {
            on_connect(ec);
            });
      }
   }

void
websocket_client::on_connect(const boost::beast::error_code& ec)
   {
   if (ec) return fail(ec);
   if (is_ssl_) {
      // Set the SNI hostname on the TLS layer; failure is converted into a
      // beast system_error built from the OpenSSL error queue.
      if (!SSL_set_tlsext_host_name( wss_stream_->next_layer().native_handle(), host_.c_str())) {
         throw boost::beast::system_error{
            boost::beast::error_code{ static_cast(::ERR_get_error()),
                                      boost::beast::net::error::get_ssl_category()}};
         }
      wss_stream_->next_layer().async_handshake(
         boost::asio::ssl::stream_base::client,
         [this](const boost::beast::error_code& ec) { on_ssl_handshake(ec); });
      }
   else {
      ws_stream_->async_handshake(host_, path_,
         [this](const boost::beast::error_code& ec) { on_handshake(ec); });
      }
   }

// TLS handshake done: perform the websocket upgrade handshake.
void websocket_client::on_ssl_handshake(const boost::beast::error_code& ec)
   {
   if (ec) return fail(ec);
   wss_stream_->async_handshake(host_, path_,
      [this](const boost::beast::error_code& ec) { on_handshake(ec); });
   }

// Websocket handshake done: notify the user and start the read loop.
void websocket_client::on_handshake(const boost::beast::error_code& ec)
   {
   if (ec) return fail(ec);
   if (connect_handler_) {
      connect_handler_();
      }
   do_read();
   }

// Queue a message for transmission; only one async_write may be in flight.
void websocket_client::send(const std::string& message)
   {
   // Beast does not allow us to run two `async_write` at the
   // same time; we have to wait for the completion handler
   // to be called.
   // Create a new buffer for this message
   queued_message msg;
   msg.data = message;
   msg.buffer = std::make_shared();
   boost::beast::ostream(*msg.buffer) << msg.data;
   message_queue_.push(msg);
   if (!writing_)
      do_write();
   }

// Write completion: report failure to the fail handler, otherwise pop the
// sent message and continue with the next one in the queue.
void websocket_client::on_write(const boost::beast::error_code& ec, std::size_t /* bytes_transferred */)
   {
   if(ec) {
      if(fail_handler_) {
         fail_handler_(ec);
         }
      return;
      }
   // Remove the message and the associated beast buffer.
   message_queue_.pop();
   // Write next message, if any.
do_write();
   }

// Start an async_write for the front of the queue, or clear the writing_
// flag when the queue is empty.
void websocket_client::do_write()
   {
   if (message_queue_.empty()) {
      writing_ = false;
      return;
      }
   writing_ = true;
   auto& msg = message_queue_.front();
   if (is_ssl_) {
      wss_stream_->async_write( msg.buffer->data(),
         [this](boost::beast::error_code ec, std::size_t bytes_transferred) { on_write(ec, bytes_transferred); });
      }
   else {
      ws_stream_->async_write( msg.buffer->data(),
         [this](boost::beast::error_code ec, std::size_t bytes_transferred) { on_write(ec, bytes_transferred); });
      }
   }

// Issue the next async_read into buffer_.
void websocket_client::do_read()
   {
   if (is_ssl_) {
      wss_stream_->async_read( buffer_,
         [this](const boost::beast::error_code& ec, std::size_t bytes) { on_read(ec, bytes); });
      }
   else {
      ws_stream_->async_read( buffer_,
         [this](const boost::beast::error_code& ec, std::size_t bytes) { on_read(ec, bytes); });
      }
   }

// A full message arrived: hand it to the message handler as a string, drain
// the buffer and keep reading.
void websocket_client::on_read(const boost::beast::error_code& ec, std::size_t /* bytes_transferred */)
   {
   if (ec) return fail(ec);
   if (message_handler_) {
      message_handler_(boost::beast::buffers_to_string(buffer_.data()));
      }
   buffer_.consume(buffer_.size());
   do_read();
   }

// Initiate a clean websocket close on the active stream.
void websocket_client::close()
   {
   if (is_ssl_) {
      wss_stream_->async_close( boost::beast::websocket::close_code::normal,
         [this](const boost::beast::error_code& ec) { on_close(ec); });
      }
   else {
      ws_stream_->async_close( boost::beast::websocket::close_code::normal,
         [this](const boost::beast::error_code& ec) { on_close(ec); });
      }
   }

void websocket_client::on_close(const boost::beast::error_code& ec)
   {
   if (ec) return fail(ec);
   if (close_handler_) {
      close_handler_();
      }
   }

// Central failure reporting: forwards the error code to the user handler.
void websocket_client::fail(const boost::beast::error_code& ec)
   {
   if (fail_handler_) {
      fail_handler_(ec);
      }
   }

// Run/stop the io_context (run() blocks until stopped).
void websocket_client::run() { ioc_.run(); }
void websocket_client::stop() { ioc_.stop(); }

// Parse "protocol://host[:port][/path]"; path defaults to "/" and port is
// left empty when absent.
Uri::Uri(const std::string& uri)
   {
   path = "/"; // default path
   std::regex pattern("^([^:]+)://([^/:]+)(?::(\\d+))?(/.*)?");
   std::smatch matches;
   if (std::regex_match(uri, matches, pattern)) {
      protocol = matches[1];
      host = matches[2];
      if (matches[3].matched)
         port
= matches[3];
      if (matches[4].matched)
         path = matches[4];
      }
   }

// Reassemble the URI; omits the ":port" part when no port was stored.
std::string Uri::to_string() const
   {
   std::string result = protocol + "://" + host;
   if (!port.empty())
      result += ":" + port;
   result += path;
   return result;
   }

================================================ FILE: client_server/websocket_client.hh ================================================

#pragma once

// NOTE(review): the #include targets below were stripped by the extraction;
// tokens kept exactly as found.
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include

// Asynchronous websocket client supporting both ws:// (plain TCP) and
// wss:// (TLS) endpoints, built on Boost.Beast/Asio.
class websocket_client {
   public:
      // Callback handlers
      // NOTE(review): std::function signatures were stripped by the extraction.
      using message_handler = std::function;
      using connect_handler = std::function;
      using close_handler = std::function;
      using fail_handler = std::function;

      websocket_client();
      ~websocket_client();

      // No copying
      websocket_client(const websocket_client&) = delete;
      websocket_client& operator=(const websocket_client&) = delete;

      // Set handlers (all optional)
      void set_message_handler(message_handler h);
      void set_connect_handler(connect_handler h);
      void set_close_handler(close_handler h);
      void set_fail_handler(fail_handler h);

      // Async operations (all return immediately)
      void connect(const std::string& uri); // ws:// or wss://
      void send(const std::string& message);
      void close();
      void run();
      void stop();

   private:
      void on_resolve(const boost::beast::error_code& ec, boost::asio::ip::tcp::resolver::results_type results);
      void on_connect(const boost::beast::error_code& ec);
      void on_ssl_handshake(const boost::beast::error_code& ec);
      void on_handshake(const boost::beast::error_code& ec);
      void on_write(const boost::beast::error_code& ec, std::size_t bytes_transferred);
      void on_read(const boost::beast::error_code& ec, std::size_t bytes_transferred);
      void on_close(const boost::beast::error_code& ec);
      void do_read();
      void do_write();
      void fail(const boost::beast::error_code& ec);

      // State
      boost::asio::io_context ioc_;
      boost::asio::ssl::context ssl_ctx_;
      boost::asio::ip::tcp::resolver resolver_;
      // Exactly one of the following two streams is used, selected by is_ssl_.
      std::unique_ptr>> wss_stream_;
      std::unique_ptr> ws_stream_;
boost::beast::flat_buffer buffer_;
      bool is_ssl_;
      std::string host_, port_, path_;

      // Handlers
      message_handler message_handler_;
      connect_handler connect_handler_;
      close_handler close_handler_;
      fail_handler fail_handler_;

      // Message queue.
      struct queued_message {
         std::string data;
         std::shared_ptr buffer;
         };
      std::queue message_queue_;
      bool writing_{false};
   };

// Minimal URI splitter used by websocket_client::connect.
class Uri {
   public:
      Uri(const std::string& uri);
      std::string to_string() const;

      std::string protocol;
      std::string host;
      std::string port;
      std::string path;
   };

================================================ FILE: client_server/websocket_server.cc ================================================

#include "websocket_server.hh"
#include

// One connection object per accepted socket; owned via shared_ptr and kept
// alive through the async callbacks by shared_from_this().
websocket_server::connection::connection(boost::asio::io_context& ioc, websocket_server& server, id_type id)
   : socket_(ioc)
   , server_(server)
   , id_(id)
   {
   }

// Begin by reading a plain HTTP request; it may turn out to be a websocket
// upgrade (decided in on_read_request).
void websocket_server::connection::start()
   {
   // First read as HTTP
   boost::beast::http::async_read(
      socket_, buffer_, http_request_,
      [self = shared_from_this()]( boost::beast::error_code ec, std::size_t bytes_transferred) {
         self->on_read_request(ec, bytes_transferred);
         });
   }

// Decide between a websocket upgrade and ordinary HTTP handling.
void websocket_server::connection::on_read_request(boost::beast::error_code ec, std::size_t)
   {
   if (ec) {
      server_.remove_connection(id_);
      return;
      }
   if (boost::beast::websocket::is_upgrade(http_request_)) {
      // Handle as WebSocket
      is_websocket_ = true;
      ws_stream_.emplace(socket_);
      ws_stream_->async_accept( http_request_,
         [self = shared_from_this()](boost::beast::error_code ec) {
            self->on_websocket_accept(ec);
            });
      }
   else {
      // Handle as HTTP
      handle_http_request();
      }
   }

// Serve a single HTTP response (via the user http_handler_ when set,
// otherwise a 404), then shut the connection down.
void websocket_server::connection::handle_http_request()
   {
   // Create response that lives through the async operation
   auto response = std::make_shared>();
   if (server_.http_handler_) {
      server_.http_handler_(http_request_, *response);
      }
   else {
      response->result(boost::beast::http::status::not_found);
      response->version(http_request_.version());
      response->set(boost::beast::http::field::server, "Beast");
response->set(boost::beast::http::field::content_type, "text/plain");
      response->body() = "404 Not Found\r\n";
      }
   response->prepare_payload();
   boost::beast::http::async_write(
      socket_, *response,
      [self = shared_from_this(), response]( // Keep response alive in lambda
         boost::beast::error_code ec, std::size_t) {
         if (ec) {
            self->server_.remove_connection(self->id_);
            return;
            }
         // HTTP is done, close the connection
         boost::beast::error_code sec;
         self->socket_.shutdown(boost::asio::ip::tcp::socket::shutdown_send, sec);
         self->server_.remove_connection(self->id_);
         });
   }

// Upgrade accepted: notify the server's connect handler and start reading
// websocket frames.
void websocket_server::connection::on_websocket_accept(boost::beast::error_code ec)
   {
   if (ec) {
      server_.remove_connection(id_);
      return;
      }
   if (server_.connect_handler_) {
      server_.connect_handler_(id_);
      }
   do_read_websocket();
   }

void websocket_server::connection::do_read_websocket()
   {
   ws_stream_->async_read(
      buffer_,
      [self = shared_from_this()]( boost::beast::error_code ec, std::size_t bytes_transferred) {
         self->on_read_websocket(ec, bytes_transferred);
         });
   }

// Deliver an incoming message (together with the original upgrade request
// and the remote address) to the server's message handler, then keep reading.
void websocket_server::connection::on_read_websocket(boost::beast::error_code ec, std::size_t)
   {
   if (ec) {
      server_.remove_connection(id_);
      return;
      }
   if (server_.message_handler_) {
      server_.message_handler_(id_, boost::beast::buffers_to_string(buffer_.data()), http_request_,
                               socket_.remote_endpoint().address().to_string());
      }
   buffer_.consume(buffer_.size());
   do_read_websocket();
   }

// Enqueue a message under queue_mutex_ and kick off a write chain when none
// is in flight. The header documents that send() may be called from a thread
// other than the one running the io_context.
void websocket_server::connection::send(const std::string& message)
   {
   static int msg_number=0;
   if (!is_websocket_) return;
   // std::cerr << "SEND CALLED on thread " << pthread_self() << std::endl;
   queued_message msg;
   msg.data = message;
   msg.buffer = std::make_shared();
   msg.seq = ++msg_number;
   boost::beast::ostream(*msg.buffer) << msg.data;
   std::unique_lock lock(queue_mutex_);
   message_queue_.push(msg);
   // std::cerr << "QUEUE size after push " << message_queue_.size() << std::endl;
   if(!writing_) {
      lock.unlock();
      do_write();
      }
   }

void websocket_server::connection::do_write()
   {
std::unique_lock lock(queue_mutex_);
   if(message_queue_.empty() || !is_websocket_) {
      writing_ = false;
      lock.unlock();
      return;
      }
   writing_ = true;
   // Copy the front message so its buffer shared_ptr stays alive while the
   // lock is released for the async_write.
   queued_message msg = message_queue_.front();
   lock.unlock();
   // std::cerr << "going to send msg " << msg.seq << std::endl;
   ws_stream_->async_write(
      msg.buffer->data(),
      [self = shared_from_this()]( boost::beast::error_code ec, std::size_t bytes_transferred) {
         self->on_write(ec, bytes_transferred);
         });
   }

// Close the connection: plain-HTTP sockets are shut down synchronously,
// websocket connections get a normal close frame.
void websocket_server::connection::close()
   {
   if (!is_websocket_) {
      socket_.shutdown(boost::asio::ip::tcp::socket::shutdown_both);
      server_.remove_connection(id_);
      return;
      }
   ws_stream_->async_close(
      boost::beast::websocket::close_code::normal,
      [self = shared_from_this()](boost::beast::error_code ec) {
         self->on_close(ec);
         });
   }

// Write completed: pop the sent message, then either drop the connection on
// error or continue with the next queued message.
void websocket_server::connection::on_write(boost::beast::error_code ec, std::size_t)
   {
   //std::cerr << "sent msg " << message_queue_.front().seq
   //          << "; queue size on_write " << message_queue_.size() << std::endl;
   std::unique_lock lock(queue_mutex_);
   message_queue_.pop();
   lock.unlock();
   if(ec) {
      server_.remove_connection(id_);
      return;
      }
   do_write();
   }

void websocket_server::connection::on_close(boost::beast::error_code ec)
   {
   server_.remove_connection(id_);
   }

// Server: optionally start listening immediately on construction.
websocket_server::websocket_server(uint16_t port)
   {
   listen(port);
   }

websocket_server::~websocket_server()
   {
   stop();
   }

// Open, bind and listen on the given port (IPv4), then start accepting.
void websocket_server::listen(uint16_t port)
   {
   boost::asio::ip::tcp::endpoint endpoint{boost::asio::ip::tcp::v4(), port};
   acceptor_.emplace(ioc_);
   acceptor_->open(endpoint.protocol());
   acceptor_->set_option(boost::asio::socket_base::reuse_address(true));
   acceptor_->bind(endpoint);
   acceptor_->listen(64 /* backlog */);
   do_accept();
   }

uint16_t websocket_server::get_local_port() const
   {
   return acceptor_->local_endpoint().port();
   }

void websocket_server::set_message_handler(message_handler h) { message_handler_ = std::move(h); }
void websocket_server::set_connect_handler(connect_handler h) { connect_handler_ = std::move(h); }
void
websocket_server::set_disconnect_handler(disconnect_handler h) { disconnect_handler_ = std::move(h); }
void websocket_server::set_http_handler(http_handler h) { http_handler_ = std::move(h); }

// Accept loop: create a connection object per accepted socket, register it
// under a fresh id, start it, then re-arm the acceptor.
void websocket_server::do_accept()
   {
   if(!acceptor_) return;
   acceptor_->async_accept(
      [this](boost::beast::error_code ec, boost::asio::ip::tcp::socket socket) {
         if (!ec) {
            auto id = next_connection_id_++;
            auto conn = std::make_shared(ioc_, *this, id);
            socket.set_option(boost::asio::ip::tcp::no_delay(true));
            conn->socket_ = std::move(socket);
            connections_[id] = conn;
            conn->start();
            }
         else {
            std::cerr << "websocket::server::do_accept: error on accept, " << ec << std::endl;
            }
         // restart for the next connection
         do_accept();
         });
   }

void websocket_server::send(id_type id, const std::string& message)
   {
   if (auto it = connections_.find(id); it != connections_.end()) {
      it->second->send(message);
      }
   }

void websocket_server::close(id_type id)
   {
   if (auto it = connections_.find(id); it != connections_.end()) {
      it->second->close();
      }
   }

// Invoke the disconnect handler (if any) and drop our shared_ptr to the
// connection; the object is destroyed once outstanding callbacks finish.
void websocket_server::remove_connection(id_type id)
   {
   if (disconnect_handler_) {
      disconnect_handler_(id);
      }
   connections_.erase(id);
   }

// run() may be called at most once for the lifetime of the process (guarded
// by a static counter); it blocks running the io_context.
void websocket_server::run()
   {
   static int calls=0;
   // std::cerr << "RUN CALLED on thread " << pthread_self() << std::endl;
   if(++calls>1)
      throw std::logic_error("Cannot call websocket_server::run multiple times.");
   ioc_.run();
   }

// Stop accepting, close all connections and stop the io_context.
void websocket_server::stop()
   {
   boost::beast::error_code ec;
   if(acceptor_)
      acceptor_->close(ec);
   for (auto& [_, conn] : connections_) {
      conn->close();
      }
   connections_.clear();
   ioc_.stop();
   }

================================================ FILE: client_server/websocket_server.hh ================================================

#pragma once

// NOTE(review): #include targets stripped by the extraction; tokens kept as found.
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include

// Websocket (with plain-HTTP fallback) server built on Boost.Beast.
class websocket_server {
   public:
      using id_type = std::size_t;
      // NOTE(review): template arguments stripped by the extraction below.
      using request_type = boost::beast::http::request;
      using response_type = boost::beast::http::response;
using message_handler = std::function;
      using connect_handler = std::function;
      using disconnect_handler = std::function;
      using http_handler = std::function;

      websocket_server() = default;
      explicit websocket_server(uint16_t port);
      ~websocket_server();

      websocket_server(const websocket_server&) = delete;
      websocket_server& operator=(const websocket_server&) = delete;

      // Change the port on which to listen.
      void listen(uint16_t port);

      void set_message_handler(message_handler h);
      void set_connect_handler(connect_handler h);
      void set_disconnect_handler(disconnect_handler h);
      void set_http_handler(http_handler h);

      // You should call `run` on a separate thread to start the
      // listening process. All callbacks on the message handlers will
      // come on this thread. It is safe to call `send` below from a
      // different thread.
      void run();
      void stop();

      // Send a message. This is safe on a thread which is not the
      // thread which called `run`.
      void send(id_type id, const std::string& message);
      void close(id_type id);

      uint16_t get_local_port() const;

   private:
      // The connection class handles all actual communication. There
      // is one instance for each connection. Connections are
      // identified by the single `id_type` stored as `id`.
class connection : public std::enable_shared_from_this {
         public:
            connection(boost::asio::io_context& ioc, websocket_server& server, id_type id);

            void start();
            void send(const std::string& message);
            void close();

            friend websocket_server;

         private:
            void on_read_request(boost::beast::error_code ec, std::size_t);
            void on_websocket_accept(boost::beast::error_code ec);
            void do_read_websocket();
            void on_read_websocket(boost::beast::error_code ec, std::size_t);
            void do_write();
            void on_write(boost::beast::error_code ec, std::size_t);
            void on_close(boost::beast::error_code ec);
            void handle_http_request();

            boost::asio::ip::tcp::socket socket_;
            std::optional> ws_stream_;
            boost::beast::flat_buffer buffer_;
            websocket_server::request_type http_request_;
            websocket_server& server_;
            id_type id_;

            // A message waiting to be written, together with the beast
            // buffer that keeps its bytes alive during async_write.
            class queued_message {
               public:
                  queued_message() {
                     // std::cerr << "Thread " << pthread_self() << " queued_message constructor " << (void*)this << std::endl;
                     }
                  ~queued_message() {
                     // std::cerr << "Thread " << pthread_self() << " queued_message destructor " << (void*)this << std::endl;
                     }
                  std::string data;
                  std::shared_ptr buffer;
                  int seq;
               };

            bool is_websocket_{false};
            std::mutex queue_mutex_; // lock this for access to the variables below
            std::queue message_queue_;
            bool writing_{false};
         };

      void do_accept();
      void remove_connection(id_type id);

      boost::asio::io_context ioc_;
      std::optional acceptor_;
      std::unordered_map> connections_;
      id_type next_connection_id_{0};

      message_handler message_handler_;
      connect_handler connect_handler_;
      disconnect_handler disconnect_handler_;
      http_handler http_handler_;
   };

================================================ FILE: cmake/cmake_uninstall.cmake.in ================================================

# Uninstall script template: removes every file recorded in
# install_manifest.txt at install time.
if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
  message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
endif()

file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
string(REGEX REPLACE "\n"
";" files "${files}") foreach(file ${files}) message(STATUS "Uninstalling $ENV{DESTDIR}${file}") if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}") execute_process( COMMAND "@CMAKE_COMMAND@" "-E" "remove" "$ENV{DESTDIR}${file}" OUTPUT_VARIABLE rm_out RESULT_VARIABLE rm_retval ) if(NOT "${rm_retval}" EQUAL 0) message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}") endif() else() message(STATUS "File $ENV{DESTDIR}${file} does not exist.") endif() endforeach(file) ================================================ FILE: cmake/functions.cmake ================================================ # Prints section headers macro(print_header TEXT) message("") message("-------------------------------------------") message(" ${TEXT}") message("-------------------------------------------") endmacro() # Install directory permissions macro(install_directory_permissions DIR) install( DIRECTORY DESTINATION ${DIR} DIRECTORY_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE ) endmacro() # Macro just like `install`, but converting the path from a unix # path to a windows path using `cygpath`. 
macro(winstall TYPE FILE TMP2 DEST)
  # Convert the unix path to a mixed Windows path; skip (with a warning)
  # when the converted path does not exist.
  execute_process(COMMAND cygpath -m ${FILE} OUTPUT_VARIABLE WFILE OUTPUT_STRIP_TRAILING_WHITESPACE)
  if(EXISTS ${WFILE})
    install(${TYPE} ${WFILE} DESTINATION ${DEST})
  else()
    message(STATUS "WARNING: file/dir ${WFILE} not present, skipping installation")
  endif()
endmacro()

================================================ FILE: cmake/modules/FindGLIBMM3.cmake ================================================

# NOTE(review): "WIN33" below looks like a garbled "WIN32" (compare
# FindGTKMM3.cmake which tests MSVC); verify against the upstream file.
if(WIN33)
  windows_find_library(GLIBMM3_LIBRARIES REQUIRED glibmm sigc glib gobject)
  if (GLIBMM3_LIBRARIES)
    set(GLIBMM3_FOUND TRUE)
  endif()
else()
  find_package(PkgConfig REQUIRED)
  if(MACOS)
    # Point pkg-config at the Homebrew-installed versioned *mm packages.
    execute_process(COMMAND brew --prefix glibmm@2.64 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE GLIBMM_PREFIX)
    execute_process(COMMAND brew --prefix cairomm@1.14 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE CAIROMM_PREFIX)
    execute_process(COMMAND brew --prefix pangomm@2.42 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE PANGOMM_PREFIX)
    execute_process(COMMAND brew --prefix atkmm@2.28 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE ATKMM_PREFIX)
    set( ENV{PKG_CONFIG_PATH} "${GLIBMM_PREFIX}/lib/pkgconfig:${CAIROMM_PREFIX}/lib/pkgconfig:${PANGOMM_PREFIX}/lib/pkgconfig:${ATKMM_PREFIX}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}" )
    message(STATUS "Using PKG_CONFIG_PATH = $ENV{PKG_CONFIG_PATH}")
  endif()
  pkg_check_modules(GLIBMM REQUIRED glibmm-2.4)
  include_directories(${GLIBMM_INCLUDE_DIRS})
  link_directories(${GLIBMM_LIBRARY_DIRS})
  add_definitions(${GLIBMM_CFLAGS_OTHER})
endif()

if (GLIBMM_FOUND)
  message(STATUS "Found glibmm; include path ${GLIBMM_INCLUDE_DIRS}")
  message(STATUS "Found glibmm; lib ${GLIBMM_LIBRARIES}")
endif()

================================================ FILE: cmake/modules/FindGLIBMM4.cmake ================================================

# Same structure as FindGLIBMM3.cmake but for the glibmm-2.68 ABI.
# NOTE(review): "WIN33" again looks like a garbled "WIN32"; verify upstream.
if(WIN33)
  windows_find_library(GLIBMM4_LIBRARIES REQUIRED glibmm sigc glib gobject)
  if (GLIBMM4_LIBRARIES)
    set(GLIBMM4_FOUND TRUE)
  endif()
else()
  find_package(PkgConfig REQUIRED)
  if(MACOS)
execute_process(COMMAND brew --prefix glibmm@2.68 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE GLIBMM_PREFIX)
    execute_process(COMMAND brew --prefix cairomm@1.16 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE CAIROMM_PREFIX)
    execute_process(COMMAND brew --prefix pangomm@2.48 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE PANGOMM_PREFIX)
    execute_process(COMMAND brew --prefix atkmm@2.28 OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE ATKMM_PREFIX)
    set( ENV{PKG_CONFIG_PATH} "${GLIBMM_PREFIX}/lib/pkgconfig:${CAIROMM_PREFIX}/lib/pkgconfig:${PANGOMM_PREFIX}/lib/pkgconfig:${ATKMM_PREFIX}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}" )
    message(STATUS "Using PKG_CONFIG_PATH = $ENV{PKG_CONFIG_PATH}")
  endif()
  pkg_check_modules(GLIBMM REQUIRED glibmm-2.68)
  include_directories(${GLIBMM_INCLUDE_DIRS})
  link_directories(${GLIBMM_LIBRARY_DIRS})
  add_definitions(${GLIBMM_CFLAGS_OTHER})
endif()

if (GLIBMM_FOUND)
  message(STATUS "Found glibmm; include path ${GLIBMM_INCLUDE_DIRS}")
  message(STATUS "Found glibmm; lib ${GLIBMM_LIBRARIES}")
endif()

================================================ FILE: cmake/modules/FindGMPXX.cmake ================================================

# Find the GMPXX library and its GMP dependency.
# Simply looks for the shared libraries on Linux/OSX.
# On Windows, finds MPIR using the logic in
# ../winlibs.cmake (which works for building against
# vcpkg).
# NOTE(review): "WIN323" looks like a garbled "WIN32"; verify upstream.
if (WIN323)
  windows_find_library(GMP_LIBRARIES mpir)
  if (GMP_LIBRARIES)
    set(GMP_FOUND TRUE)
  endif()
  windows_find_library(GMPXX_LIBRARIES mpir)
  if (GMPXX_LIBRARIES)
    set(GMPXX_FOUND TRUE)
  endif()
else()
  find_path(GMP_INCLUDE_DIRS NAMES gmp.h REQUIRED)
  find_library(GMP_LIBRARIES gmp REQUIRED)
  find_library(GMPXX_LIBRARIES gmpxx REQUIRED)
  message("-- Found gmp header at ${GMP_INCLUDE_DIRS}")
  message("-- Found gmp library at ${GMP_LIBRARIES}")
  set(GMP_FOUND 1)
  set(GMPXX_FOUND 1)
endif()

if (GMP_FOUND)
  message(STATUS "Found gmp")
else()
  message(FATAL_ERROR "Gmp not found")
endif()
if (GMPXX_FOUND)
  message(STATUS "Found gmpxx")
else()
  message(FATAL_ERROR "Gmpxx not found")
endif()

================================================ FILE: cmake/modules/FindGTKMM3.cmake ================================================

if(MSVC)
  windows_find_library(GTKMM_LIBRARIES
    gtk gdk gdk_pixbuf pangocairo pango atk gio gobject gmodule glib
    cairo-gobject cairo intl atkmm cairomm gdkmm giomm glibmm gtkmm pangomm )
  if (GTKMM_LIBRARIES)
    set(GTKMM3_FOUND TRUE)
  endif()
else()
  find_package(PkgConfig REQUIRED)
  pkg_check_modules(GTKMM REQUIRED IMPORTED_TARGET gtkmm-3.0)
  pkg_check_modules(GLIBMM REQUIRED IMPORTED_TARGET glibmm-2.4)
  pkg_check_modules(GLIB REQUIRED IMPORTED_TARGET glib-2.0)
  pkg_check_modules(PangoMM REQUIRED IMPORTED_TARGET pangomm-1.4)
  pkg_check_modules(CairoMM REQUIRED IMPORTED_TARGET cairomm-1.0)
  include_directories(${GTKMM_INCLUDE_DIRS} ${CairoMM_INCLUDE_DIRS} ${PangoMM_INCLUDE_DIRS})
  link_directories(${GTKMM_LIBRARY_DIRS})
  add_definitions(${GTKMM_CFLAGS_OTHER})
  if(GTKMM_FOUND)
    set(GTKMM3_FOUND TRUE)
  endif()
endif()

if(GTKMM3_FOUND)
  message(STATUS "Found gtkmm3")
endif()

================================================ FILE: cmake/modules/FindGTKMM4.cmake ================================================

if(MSVC)
  windows_find_library(GTKMM_LIBRARIES
    gtk gdk gdk_pixbuf pangocairo pango atk gio gobject gmodule glib
    cairo-gobject cairo intl atkmm cairomm gdkmm giomm glibmm gtkmm
pangomm )
  if (GTKMM_LIBRARIES)
    set(GTKMM4_FOUND TRUE)
  endif()
else()
  find_package(PkgConfig REQUIRED)
  pkg_check_modules(GTKMM REQUIRED IMPORTED_TARGET gtkmm-4.0)
  pkg_check_modules(GLIBMM REQUIRED IMPORTED_TARGET glibmm-2.68)
  pkg_check_modules(PangoMM REQUIRED IMPORTED_TARGET pangomm-2.48)
  pkg_check_modules(CairoMM REQUIRED IMPORTED_TARGET cairomm-1.16)
  include_directories(${GTKMM_INCLUDE_DIRS})
  link_directories(${GTKMM_LIBRARY_DIRS})
  add_definitions(${GTKMM_CFLAGS_OTHER})
  if(GTKMM_FOUND)
    set(GTKMM4_FOUND TRUE)
  endif()
endif()

if(GTKMM4_FOUND)
  message(STATUS "Found gtkmm4")
endif()

================================================ FILE: cmake/modules/FindJSONCPP.cmake ================================================

find_package(PkgConfig REQUIRED)
pkg_check_modules(JSONCPP jsoncpp)
#message("**** ${JSONCPP_INCLUDE_DIRS}")
#message("**** ${JSONCPP_LIBRARIES}")

================================================ FILE: cmake/modules/FindLibPythonOSX.py ================================================

# Note by Nikolaus Demmel 28.03.2014: My contributions are licensend under the
# same as CMake (BSD). My adaptations are in part based
# https://github.com/qgis/QGIS/tree/master/cmake which has the following
# copyright note:

# FindLibPython.py
# Copyright (c) 2007, Simon Edwards
# Redistribution and use is allowed according to the terms of the BSD license.
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
import sys
import distutils.sysconfig

# Emit the running interpreter's configuration as "key:value" lines on stdout;
# FindPythonLibsOSX.cmake parses this output to locate Python on macOS.
# NOTE(review): distutils was removed from the stdlib in Python 3.12, so this
# script will fail on modern interpreters — consider migrating to `sysconfig`.
print("exec_prefix:%s" % sys.exec_prefix)
print("major_version:%s" % str(sys.version_info[0]))
print("minor_version:%s" % str(sys.version_info[1]))
print("patch_version:%s" % str(sys.version_info[2]))
print("short_version:%s" % '.'.join(map(lambda x:str(x), sys.version_info[0:2])))
print("long_version:%s" % '.'.join(map(lambda x:str(x), sys.version_info[0:3])))
print("py_inc_dir:%s" % distutils.sysconfig.get_python_inc())
print("site_packages_dir:%s" % distutils.sysconfig.get_python_lib(plat_specific=1))

# get_config_vars('LIBDIR') returns a list of values; report the first
# non-None entry as the library directory, then stop.
for e in distutils.sysconfig.get_config_vars ('LIBDIR'):
    if e != None:
        print("py_lib_dir:%s" % e)
        break


================================================
FILE: cmake/modules/FindMathematica.cmake
================================================
# - Try to find Mathematica installation and provide CMake functions for its C/C++ interface
#
# See the FindMathematica manual for usage hints.
#
#=============================================================================
# Copyright 2010-2021 Sascha Kratky
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
and escape sequence evaluation cmake_policy(SET CMP0053 NEW) endif() if (POLICY CMP0054) # only interpret if() arguments as variables or keywords when unquoted cmake_policy(SET CMP0054 NEW) endif() include(TestBigEndian) include(CMakeParseArguments) include(FindPackageHandleStandardArgs) include(CMakeFindFrameworks) # internal function to convert Windows path to Cygwin workable CMake path # E.g., "C:\Program Files" is converted to "/cygdrive/c/Program Files" # file(TO_CMAKE_PATH "C:\Program Files" ...) would result in unworkable "C;/Program Files" function (_to_cmake_path _inPath _outPathVariable) if (CYGWIN) find_program(Mathematica_CYGPATH_EXECUTABLE "cygpath") mark_as_advanced(Mathematica_CYGPATH_EXECUTABLE) execute_process( COMMAND "${Mathematica_CYGPATH_EXECUTABLE}" "--unix" "${_inPath}" TIMEOUT 5 OUTPUT_VARIABLE ${_outPathVariable} OUTPUT_STRIP_TRAILING_WHITESPACE) else() file(TO_CMAKE_PATH "${_inPath}" ${_outPathVariable}) endif() set (${_outPathVariable} "${${_outPathVariable}}" PARENT_SCOPE) endfunction() # internal function to convert CMake path to "pure" native path without escapes function (_to_native_path _inPath _outPathVariable) # do not use the built-in function file (TO_NATIVE_PATH ...), # which does too much or the wrong thing: # it converts a CMake path to a native path but then also escapes all blanks # and special characters # under MinGW it produces unworkable paths with forward slashes if (CYGWIN) find_program(Mathematica_CYGPATH_EXECUTABLE "cygpath") mark_as_advanced(Mathematica_CYGPATH_EXECUTABLE) execute_process( COMMAND "${Mathematica_CYGPATH_EXECUTABLE}" "--mixed" "${_inPath}" TIMEOUT 5 OUTPUT_VARIABLE ${_outPathVariable} OUTPUT_STRIP_TRAILING_WHITESPACE) elseif (CMAKE_HOST_UNIX) # use CMake path literally under UNIX set (${_outPathVariable} "${_inPath}") elseif (CMAKE_HOST_WIN32) string (REPLACE "/" "\\" ${_outPathVariable} "${_inPath}") else() message (FATAL_ERROR "Unsupported host platform ${CMAKE_HOST_SYSTEM_NAME}") endif() set 
(${_outPathVariable} "${${_outPathVariable}}" PARENT_SCOPE) endfunction() # internal macro to set a file's executable bit under UNIX macro (_make_file_executable _inPath) if (CMAKE_HOST_UNIX) _to_native_path ("${_inPath}" _nativePath) execute_process( COMMAND chmod "-f" "+x" "${_nativePath}" TIMEOUT 5) endif() endmacro() # internal macro to convert list to command string with quoting macro (_list_to_cmd_str _outCmd) set (_str "") foreach (_arg ${ARGN}) if ("${_arg}" MATCHES " ") set (_arg "\"${_arg}\"") endif() if (_str) set (_str "${_str} ${_arg}") else() set (_str "${_arg}") endif() endforeach() set (${_outCmd} "${_str}") endmacro() # internal macro to compute kernel paths (relative to installation directory) macro (_get_host_kernel_names _outKernelNames) if (Mathematica_FIND_VERSION AND Mathematica_FIND_VERSION_EXACT) if (Mathematica_FIND_VERSION VERSION_LESS "10.0.0") if (CMAKE_HOST_WIN32 OR CYGWIN) set (${_outKernelNames} "math.exe") elseif (CMAKE_HOST_APPLE) set (${_outKernelNames} "Contents/MacOS/MathKernel") elseif (CMAKE_HOST_UNIX) set (${_outKernelNames} "Executables/MathKernel" "Executables/math") endif() else() if (CMAKE_HOST_WIN32 OR CYGWIN) set (${_outKernelNames} "wolfram.exe") elseif (CMAKE_HOST_APPLE) set (${_outKernelNames} "Contents/MacOS/WolframKernel") elseif (CMAKE_HOST_UNIX) set (${_outKernelNames} "Executables/WolframKernel") endif() endif() else() if (CMAKE_HOST_WIN32 OR CYGWIN) set (${_outKernelNames} "wolfram.exe" "math.exe") elseif (CMAKE_HOST_APPLE) set (${_outKernelNames} "Contents/MacOS/WolframKernel" "Contents/MacOS/MathKernel") elseif (CMAKE_HOST_UNIX) set (${_outKernelNames} "Executables/WolframKernel" "Executables/MathKernel" "Executables/math") endif() endif() endmacro() # internal macro to to compute front end paths (relative to installation directory) macro (_get_host_frontend_names _outFrontEndNames) if (CMAKE_HOST_WIN32 OR CYGWIN) set (${_outFrontEndNames} "Mathematica.exe") elseif (CMAKE_HOST_APPLE) set (${_outFrontEndNames} 
"Contents/MacOS/Mathematica") elseif (CMAKE_HOST_UNIX) set (${_outFrontEndNames} "Executables/mathematica" "Executables/Mathematica") endif() endmacro() # internal macro to compute program name from product name and version # E.g., "Mathematica" and "7.0" gives "Mathematica 7.0.app" for Mac OS X macro (_append_program_names _product _version _outProgramNames) string (REPLACE " " "" _productWithoutBlanks "${_product}") if (CMAKE_HOST_APPLE) if (${_version}) # under Mac OS X the application name may contain the version number as a suffix list (APPEND ${_outProgramNames} "${_product} ${_version}.app") list (APPEND ${_outProgramNames} "${_productWithoutBlanks} ${_version}.app") else() list (APPEND ${_outProgramNames} "${_product}.app") list (APPEND ${_outProgramNames} "${_productWithoutBlanks}.app") endif() else() if (${_version}) # other platforms have a sub-directory named after the version number list (APPEND ${_outProgramNames} "${_product}/${_version}") list (APPEND ${_outProgramNames} "${_productWithoutBlanks}/${_version}") endif() endif() endmacro() # internal macro to determine search order for different versions of Mathematica macro (_get_program_names _outProgramNames) set (${_outProgramNames} "") # Mathematica products in order of preference set (_MathematicaApps "Mathematica" "mathematica" "Wolfram Desktop" "Wolfram Engine" "gridMathematica Server") # Mathematica product versions in order of preference set (_MathematicaVersions "13.2" "13.1" "13.0" "12.3" "12.2" "12.1" "12.0" "11.3" "11.2" "11.1" "11.0" "10.4" "10.3" "10.2" "10.1" "10.0" "9.0" "8.0" "7.0" "6.0" "5.2") # search for explicitly requested application version first if (Mathematica_FIND_VERSION AND Mathematica_FIND_VERSION_EXACT) foreach (_product IN LISTS _MathematicaApps) _append_program_names("${_product}" "${Mathematica_FIND_VERSION_MAJOR}.${Mathematica_FIND_VERSION_MINOR}" ${_outProgramNames}) endforeach() endif() # then try all qualified application names foreach (_product IN LISTS 
_MathematicaApps) foreach (_version IN LISTS _MathematicaVersions) _append_program_names("${_product}" "${_version}" ${_outProgramNames}) endforeach() endforeach() # then try unqualified application names foreach (_product IN LISTS _MathematicaApps) _append_program_names("${_product}" "" ${_outProgramNames}) endforeach() list (REMOVE_DUPLICATES ${_outProgramNames}) endmacro() # internal function to get Mathematica Windows installation directory for a registry entry function (_add_registry_search_path _registryKey _outSearchPaths) set (_ProductNamePatterns "Wolfram Mathematica [0-9.]+" "Wolfram Desktop [0-9.]+" "Wolfram Engine [0-9.]+" "Wolfram Finance Platform") get_filename_component ( _productName "[${_registryKey};ProductName]" NAME) get_filename_component ( _productVersion "[${_registryKey};ProductVersion]" NAME) get_filename_component ( _productPath "[${_registryKey};ExecutablePath]" PATH) if (Mathematica_DEBUG) message (STATUS "[${_registryKey};ProductName]=${_productName}") message (STATUS "[${_registryKey};ProductVersion]=${_productVersion}") message (STATUS "[${_registryKey};ExecutablePath]=${_productPath}") endif() set (_qualified False) foreach (_Pattern IN LISTS _ProductNamePatterns) if ("${_productName}" MATCHES "${_Pattern}") set (_qualified True) break() endif() endforeach() if (_qualified) if (EXISTS "${_productPath}") _to_cmake_path("${_productPath}" _path) if (Mathematica_FIND_VERSION AND Mathematica_FIND_VERSION_EXACT) if ("${_productVersion}" MATCHES "${Mathematica_FIND_VERSION}") # prepend if version matches requested one list (INSERT ${_outSearchPaths} 0 "${_path}") else() list (APPEND ${_outSearchPaths} "${_path}") endif() else() list (APPEND ${_outSearchPaths} "${_path}") endif() endif() endif() set (${_outSearchPaths} ${${_outSearchPaths}} PARENT_SCOPE) endfunction() # internal function to determine Mathematica installation paths from Windows registry function (_add_registry_search_paths _outSearchPaths) if (CMAKE_HOST_WIN32) foreach 
(_registryKey IN ITEMS ${ARGN}) set (_regExe "reg.exe") if (DEFINED ENV{windir}) # use 64-bit reg.exe under WoW64 to make sure we search all keys if (EXISTS "$ENV{windir}/sysnative/reg.exe") set (_regExe "$ENV{windir}/sysnative/reg.exe") endif() endif() execute_process( COMMAND "${_regExe}" query "${_registryKey}" "/s" COMMAND findstr "${_registryKey}" TIMEOUT 5 OUTPUT_VARIABLE _queryResult ERROR_QUIET) string (REGEX MATCHALL "[0-9]+" _installIDs "${_queryResult}") if (_installIDs) # _installIDs sorted from oldest to newest version list (REVERSE _installIDs) set (_paths "") foreach (_installID IN LISTS _installIDs) _add_registry_search_path("${_registryKey}\\${_installID}" _paths) endforeach() list (APPEND ${_outSearchPaths} ${_paths}) endif() endforeach() set (${_outSearchPaths} ${${_outSearchPaths}} PARENT_SCOPE) endif() endfunction() # internal function to determine Mathematica installation paths from Mac OS X LaunchServices database function (_add_launch_services_search_paths _outSearchPaths) if (CMAKE_HOST_APPLE) # the lsregister executable is needed to search the LaunchServices database # the executable usually resides in the LaunchServices framework Support directory # The LaunchServices framework is a sub-framework of the CoreServices umbrella framework cmake_find_frameworks(CoreServices) find_program (Mathematica_LSRegister_EXECUTABLE NAMES "lsregister" PATH_SUFFIXES "/Frameworks/LaunchServices.framework/Support" HINTS ${CoreServices_FRAMEWORKS}) mark_as_advanced( Mathematica_CoreServices_DIR Mathematica_LaunchServices_DIR Mathematica_LSRegister_EXECUTABLE) if (NOT Mathematica_LSRegister_EXECUTABLE) message (STATUS "Skipping search of the LaunchServices database, because the lsregister executable could not be found.") return() endif() foreach (_bundleID IN ITEMS ${ARGN}) execute_process( COMMAND "${Mathematica_LSRegister_EXECUTABLE}" "-dump" COMMAND "grep" "--before-context=20" "--after-context=20" "${_bundleID}" COMMAND "grep" "--only-matching" 
"/.*\\.app" TIMEOUT 10 OUTPUT_VARIABLE _queryResult ERROR_QUIET) string (REPLACE ";" "\\;" _queryResult "${_queryResult}") string (REPLACE "\n" ";" _appPaths "${_queryResult}") if (_appPaths) # put paths into canonical order list (SORT _appPaths) list (REVERSE _appPaths) else() message (STATUS "No Mathematica apps registered in Mac OS X LaunchServices database.") endif() if (Mathematica_DEBUG) message (STATUS "Mac OS X LaunchServices database registered apps=${_appPaths}") endif() if (_appPaths) set (_paths "") set (_insertIndex 0) foreach (_appPath IN LISTS _appPaths) # ignore paths that no longer exist if (EXISTS "${_appPath}") _to_cmake_path("${_appPath}" _appPath) if (Mathematica_FIND_VERSION AND Mathematica_FIND_VERSION_EXACT) if ("${_appPath}" MATCHES "${Mathematica_FIND_VERSION_MAJOR}.${Mathematica_FIND_VERSION_MINOR}") # insert in front of other versions if version matches requested one list (LENGTH _paths _len) if (_len EQUAL _insertIndex) list (APPEND _paths "${_appPath}") else() list (INSERT _paths ${_insertIndex} "${_appPath}") endif() math(EXPR _insertIndex "${_insertIndex} + 1") else() list (APPEND _paths "${_appPath}") endif() else() list (APPEND _paths "${_appPath}") endif() endif() endforeach() list (APPEND ${_outSearchPaths} ${_paths}) endif() endforeach() set (${_outSearchPaths} ${${_outSearchPaths}} PARENT_SCOPE) endif() endfunction() # internal macro to determine default Mathematica installation (the one which is on the system search path) macro (_add_default_search_path _outSearchPaths) set (_searchPaths "") if (DEFINED ENV{PATH}) file (TO_CMAKE_PATH "$ENV{PATH}" _searchPaths) endif() _get_host_kernel_names(_kernelNames) foreach (_searchPath IN LISTS _searchPaths) if (CMAKE_HOST_WIN32 OR CYGWIN) set (_executable "${_searchPath}/math.exe") else() set (_executable "${_searchPath}/math") endif() if (EXISTS "${_executable}") # resolve symlinks get_filename_component (_executable "${_executable}" REALPATH) foreach (_kernelName IN LISTS 
_kernelNames)
            string (REPLACE "${_kernelName}" "" _executableDir "${_executable}")
            if (NOT "${_executable}" STREQUAL "${_executableDir}" AND IS_DIRECTORY ${_executableDir})
               if (Mathematica_FIND_VERSION)
                  list (APPEND ${_outSearchPaths} "${_executableDir}")
               else()
                  # prefer default installation if not searching for version explicitly
                  list (INSERT ${_outSearchPaths} 0 "${_executableDir}")
               endif()
            endif()
         endforeach()
      endif()
   endforeach()
endmacro()

# internal macro to determine platform specific Mathematica installation search paths
macro (_get_search_paths _outSearchPaths)
   set (${_outSearchPaths} "")
   if (CMAKE_HOST_WIN32 OR CYGWIN)
      # add non-standard installation paths from Windows registry
      _add_registry_search_paths(${_outSearchPaths}
         "HKEY_LOCAL_MACHINE\\SOFTWARE\\Wolfram Research\\Installations"
         "HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Wolfram Research\\Installations")
      # environment variable locations where Mathematica may be installed
      # FIX: removed a stray "to" token that injected a bogus "to" entry
      # into the environment-variable list
      set (_WindowsProgramFilesEnvVars
         "ProgramW6432"
         "ProgramFiles(x86)"
         "ProgramFiles"
      )
      if (CYGWIN)
         # Cygwin may be configured to convert all environment variables to all-uppercase
         list (APPEND _WindowsProgramFilesEnvVars "PROGRAMW6432" "PROGRAMFILES(X86)" "PROGRAMFILES")
      endif()
      # add standard Mathematica Windows installation paths
      foreach (_envVar IN LISTS _WindowsProgramFilesEnvVars)
         # FIX: restored missing closing brace in the ENV{...} reference,
         # without it the DEFINED check never matched
         if (DEFINED ENV{${_envVar}})
            _to_cmake_path("$ENV{${_envVar}}" _unixPath)
            list (APPEND ${_outSearchPaths} "${_unixPath}/Wolfram Research" )
         endif()
      endforeach()
      # add default installation path
      if (IS_DIRECTORY "C:/Program Files/Wolfram Research")
         list (APPEND ${_outSearchPaths} "C:/Program Files/Wolfram Research" )
      endif()
      # Windows container paths may be lowercase
      if (IS_DIRECTORY "C:/Program Files/wolfram research")
         list (APPEND ${_outSearchPaths} "C:/Program Files/wolfram research" )
      endif()
   elseif (CMAKE_HOST_APPLE)
      # add standard Mathematica Mac OS X installation paths
      list (APPEND ${_outSearchPaths} "~/Applications;/Applications")
      if
(CMAKE_SYSTEM_APPBUNDLE_PATH) list (APPEND ${_outSearchPaths} ${CMAKE_SYSTEM_APPBUNDLE_PATH}) endif() # add non-standard installation paths from Mac OS X LaunchServices database _add_launch_services_search_paths(${_outSearchPaths} "com.wolfram.Mathematica") elseif (CMAKE_HOST_UNIX) # add standard Mathematica Unix installation paths list (APPEND ${_outSearchPaths} "/usr/local/Wolfram" "/opt/Wolfram") endif() _add_default_search_path(${_outSearchPaths}) if (${_outSearchPaths}) list (REMOVE_DUPLICATES ${_outSearchPaths}) endif() endmacro() # internal macro to compute Mathematica SystemIDs from system name macro (_systemNameToSystemID _systemName _systemProcessor _outSystemIDs) if ("${_systemName}" STREQUAL "Windows") if ("${_systemProcessor}" STREQUAL "AMD64") set (${_outSystemIDs} "Windows-x86-64") else() # default to 32-bit Windows set (${_outSystemIDs} "Windows") endif() elseif ("${_systemName}" STREQUAL "CYGWIN") if ("${_systemProcessor}" STREQUAL "x86_64") set (${_outSystemIDs} "Windows-x86-64") else() # default to 32-bit Windows set (${_outSystemIDs} "Windows") endif() elseif ("${_systemName}" STREQUAL "Darwin") if ("${_systemProcessor}" STREQUAL "i386") set (${_outSystemIDs} "MacOSX-x86") elseif ("${_systemProcessor}" STREQUAL "x86_64") set (${_outSystemIDs} "MacOSX-x86-64") elseif ("${_systemProcessor}" STREQUAL "arm64") set (${_outSystemIDs} "MacOSX-ARM64") elseif ("${_systemProcessor}" MATCHES "ppc64|powerpc64") set (${_outSystemIDs} "Darwin-PowerPC64") elseif ("${_systemProcessor}" MATCHES "ppc|powerpc") if (Mathematica_VERSION) # Mathematica versions before 6 used "Darwin" as system ID for ppc32 if (NOT "${Mathematica_VERSION}" VERSION_LESS "6.0") set (${_outSystemIDs} "MacOSX") else() set (${_outSystemIDs} "Darwin") endif() else () set (${_outSystemIDs} "MacOSX" "Darwin") endif() endif() elseif ("${_systemName}" STREQUAL "Linux") if ("${_systemProcessor}" MATCHES "^i.86$") set (${_outSystemIDs} "Linux") elseif ("${_systemProcessor}" MATCHES 
"x86_64|amd64") set (${_outSystemIDs} "Linux-x86-64") elseif ("${_systemProcessor}" STREQUAL "ia64") set (${_outSystemIDs} "Linux-IA64") elseif ("${_systemProcessor}" MATCHES "^arm") set (${_outSystemIDs} "Linux-ARM") endif() elseif ("${_systemName}" STREQUAL "SunOS") if ("${_systemProcessor}" MATCHES "^sparc") if (Mathematica_VERSION) # Mathematica versions before 6 used "UltraSPARC" as system ID for Solaris if (NOT "${Mathematica_VERSION}" VERSION_LESS "6.0") set (${_outSystemIDs} "Solaris-SPARC") else() set (${_outSystemIDs} "UltraSPARC") endif() else () set (${_outSystemIDs} "Solaris-SPARC" "UltraSPARC") endif() elseif ("${_systemProcessor}" STREQUAL "x86_64") set (${_outSystemIDs} "Solaris-x86-64") endif() elseif ("${_systemName}" STREQUAL "AIX") set (${_outSystemIDs} "AIX-Power64") elseif ("${_systemName}" STREQUAL "HP-UX") set (${_outSystemIDs} "HPUX-PA64") elseif ("${_systemName}" STREQUAL "IRIX") set (${_outSystemIDs} "IRIX-MIPS64") endif() endmacro(_systemNameToSystemID) # internal macro to compute target Mathematica SystemIDs macro (_get_system_IDs _outSystemIDs) if (WIN32 OR CYGWIN) # pointer size check is more reliable than CMAKE_SYSTEM_PROCESSOR if (CMAKE_SIZEOF_VOID_P EQUAL 8) set (${_outSystemIDs} "Windows-x86-64") else() set (${_outSystemIDs} "Windows") endif() elseif (APPLE) set (${_outSystemIDs} "") if (CMAKE_OSX_ARCHITECTURES) # determine System ID from specified architectures foreach (_arch ${CMAKE_OSX_ARCHITECTURES}) set (_systemID "") _systemNameToSystemID("${CMAKE_SYSTEM_NAME}" "${_arch}" _systemID) if (_systemID) list (APPEND ${_outSystemIDs} ${_systemID}) else() message (FATAL_ERROR "Unsupported Mac OS X architecture ${_arch}") endif() endforeach() else() # determine System ID by checking endianness and pointer size TEST_BIG_ENDIAN(_isBigEndian) if (_isBigEndian) if (CMAKE_SIZEOF_VOID_P EQUAL 8) set (${_outSystemIDs} "Darwin-PowerPC64") else() if (Mathematica_VERSION) # Mathematica versions before 6 used "Darwin" as system ID for ppc32 if 
(NOT "${Mathematica_VERSION}" VERSION_LESS "6.0") set (${_outSystemIDs} "MacOSX") else() set (${_outSystemIDs} "Darwin") endif() else () set (${_outSystemIDs} "MacOSX" "Darwin") endif() endif() else() if (CMAKE_SIZEOF_VOID_P EQUAL 8) set (${_outSystemIDs} "MacOSX-x86-64") else() set (${_outSystemIDs} "MacOSX-x86") endif() endif() endif() elseif (UNIX) if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Linux") # pointer size check is more reliable than CMAKE_SYSTEM_PROCESSOR if (CMAKE_SIZEOF_VOID_P EQUAL 8) set (${_outSystemIDs} "Linux-x86-64") else() set (${_outSystemIDs} "Linux") endif() else() _systemNameToSystemID("${CMAKE_SYSTEM_NAME}" "${CMAKE_SYSTEM_PROCESSOR}" ${_outSystemIDs}) endif() else() set (${_outSystemIDs} "Generic") endif() list (REMOVE_DUPLICATES ${_outSystemIDs}) endmacro(_get_system_IDs) # internal macro to compute host Mathematica SystemIDs macro (_get_host_system_IDs _outSystemIDs) if (CMAKE_HOST_WIN32) set (${_outSystemIDs} "Windows") if (DEFINED ENV{PROCESSOR_ARCHITEW6432}) if ("$ENV{PROCESSOR_ARCHITEW6432}" STREQUAL "AMD64") # running of WoW64, host is native 64-bit Windows set (${_outSystemIDs} "Windows-x86-64") endif() elseif (DEFINED ENV{PROCESSOR_ARCHITECTURE}) if ("$ENV{PROCESSOR_ARCHITECTURE}" STREQUAL "AMD64") # host is native 64-bit Windows set (${_outSystemIDs} "Windows-x86-64") endif() endif() else() # always determine host system ID from # CMAKE_HOST_SYSTEM_NAME and CMAKE_HOST_SYSTEM_PROCESSOR if (_CMAKE_OSX_MACHINE) # work-around for Mac OS X, where CMAKE_HOST_SYSTEM_PROCESSOR is not always accurate set (_hostSystemProcessor "${_CMAKE_OSX_MACHINE}") else() set (_hostSystemProcessor "${CMAKE_HOST_SYSTEM_PROCESSOR}") endif() _systemNameToSystemID( "${CMAKE_HOST_SYSTEM_NAME}" "${_hostSystemProcessor}" _hostSystemID) if (NOT DEFINED _hostSystemID) message (FATAL_ERROR "Unsupported host platform ${CMAKE_HOST_SYSTEM_NAME}") endif() _get_compatible_system_IDs(${_hostSystemID} ${_outSystemIDs}) endif() endmacro() macro (_get_supported_systemIDs 
_version _outSystemIDs) if (NOT "${_version}" VERSION_LESS "12.3") set (${_outSystemIDs} "Windows-x86-64" "Linux-x86-64" "Linux-ARM" "MacOSX-x86-64" "MacOSX-ARM64") elseif (NOT "${_version}" VERSION_LESS "12.1") set (${_outSystemIDs} "Windows-x86-64" "Linux-x86-64" "Linux-ARM" "MacOSX-x86-64") elseif (NOT "${_version}" VERSION_LESS "11.3") set (${_outSystemIDs} "Windows" "Windows-x86-64" "Linux-x86-64" "Linux-ARM" "MacOSX-x86-64") elseif (NOT "${_version}" VERSION_LESS "10.0") set (${_outSystemIDs} "Windows" "Windows-x86-64" "Linux" "Linux-x86-64" "Linux-ARM" "MacOSX-x86-64") elseif (NOT "${_version}" VERSION_LESS "9.0") set (${_outSystemIDs} "Windows" "Windows-x86-64" "Linux" "Linux-x86-64" "MacOSX-x86-64") elseif (NOT "${_version}" VERSION_LESS "8.0") set (${_outSystemIDs} "Windows" "Windows-x86-64" "Linux" "Linux-x86-64" "MacOSX-x86" "MacOSX-x86-64") elseif (NOT "${_version}" VERSION_LESS "7.0") set (${_outSystemIDs} "Windows" "Windows-x86-64" "Linux" "Linux-x86-64" "MacOSX-x86" "MacOSX-x86-64" "MacOSX" "Solaris-SPARC" "Solaris-x86-64") elseif (NOT "${_version}" VERSION_LESS "6.0") set (${_outSystemIDs} "Windows" "Windows-x86-64" "Linux" "Linux-x86-64" "Linux-IA64" "MacOSX-x86" "MacOSX-x86-64" "MacOSX" "Solaris-SPARC" "Solaris-x86-64" "AIX-Power64") elseif (NOT "${_version}" VERSION_LESS "5.2") set (${_outSystemIDs} "Windows" "Windows-x86-64" "Linux" "Linux-x86-64" "Linux-IA64" "MacOSX-x86" "Darwin-PowerPC64" "Darwin" "UltraSPARC" "Solaris-x86-64" "AIX-Power64" "DEC-AXP" "HPUX-PA64" "IRIX-MIPS64") else() # platforms probably supported before 5.2? 
set (${_outSystemIDs} "Windows" "Linux" "Linux-x86-64" "Linux-IA64" "Linux-PPC" "Darwin" "Solaris" "SGI" "IBM-RISC" "DEC-AXP" "HP-RISC" "IRIX-MIPS32" "IRIX-MIPS64") endif() endmacro() macro (_get_compatible_system_IDs _systemID _outSystemIDs) set (${_outSystemIDs} "") if ("${_systemID}" STREQUAL "Windows-x86-64") if (Mathematica_VERSION) if (NOT "${Mathematica_VERSION}" VERSION_LESS "5.2") # Mathematica 5.2 added support for Windows-x86-64 list (APPEND ${_outSystemIDs} "Windows-x86-64") endif() else() list (APPEND ${_outSystemIDs} "Windows-x86-64") endif() if (Mathematica_VERSION) # Mathematica 12.1 dropped support for x86 if ("${Mathematica_VERSION}" VERSION_LESS "12.1") # Windows x64 can run x86 through WoW64 list (APPEND ${_outSystemIDs} "Windows") endif() else() # Windows x64 can run x86 through WoW64 list (APPEND ${_outSystemIDs} "Windows") endif() elseif ("${_systemID}" MATCHES "MacOSX|Darwin") if ("${_systemID}" MATCHES "MacOSX-x86") if (Mathematica_VERSION) # Mathematica 6 added support for MacOSX-x86-64 if (NOT "${Mathematica_VERSION}" VERSION_LESS "6.0") list (APPEND ${_outSystemIDs} "MacOSX-x86-64") endif() # Mathematica 5.2 added support for MacOSX-x86 # Mathematica 9.0 dropped support for MacOSX-x86 if (NOT "${Mathematica_VERSION}" VERSION_LESS "5.2" AND "${Mathematica_VERSION}" VERSION_LESS "9.0") list (APPEND ${_outSystemIDs} "MacOSX-x86") endif() else() list (APPEND ${_outSystemIDs} "MacOSX-x86-64" "MacOSX-x86") endif() elseif ("${_systemID}" MATCHES "MacOSX-ARM64") if (Mathematica_VERSION) # Mathematica 12.3 added support for MacOSX-ARM64 if (NOT "${Mathematica_VERSION}" VERSION_LESS "12.3") list (APPEND ${_outSystemIDs} "MacOSX-ARM64") endif() # Mathematica 6 added support for MacOSX-x86-64 if (NOT "${Mathematica_VERSION}" VERSION_LESS "6.0") list (APPEND ${_outSystemIDs} "MacOSX-x86-64") endif() else() list (APPEND ${_outSystemIDs} "MacOSX-ARM64" "MacOSX-x86-64") endif() elseif ("${_systemID}" STREQUAL "Darwin-PowerPC64") if (Mathematica_VERSION) 
if (NOT "${Mathematica_VERSION}" VERSION_LESS "5.2" AND "${Mathematica_VERSION}" VERSION_LESS "6.0") # Only Mathematica 5.2 supports Darwin-PowerPC64 list (APPEND ${_outSystemIDs} "Darwin-PowerPC64") endif() else() list (APPEND ${_outSystemIDs} "Darwin-PowerPC64") endif() endif() # handle ppc32 (Darwin or MacOSX) # Mac OS X versions before Lion support ppc32 natively or through Rosetta # (Mac OS X 10.7.0 is Darwin 11.0.0) if ("${CMAKE_HOST_SYSTEM_VERSION}" VERSION_LESS "11.0.0") if (Mathematica_VERSION) if ("${Mathematica_VERSION}" VERSION_LESS "6.0") # Mathematica versions before 6 used "Darwin" as system ID for ppc32 list (APPEND ${_outSystemIDs} "Darwin") elseif ("${Mathematica_VERSION}" VERSION_LESS "8.0") # Mathematica 8 dropped support for ppc32 list (APPEND ${_outSystemIDs} "MacOSX") endif() else() list (APPEND ${_outSystemIDs} "MacOSX" "Darwin") endif() endif() elseif ("${_systemID}" MATCHES "Linux-x86-64|Linux-IA64") if (Mathematica_VERSION) if (NOT "${Mathematica_VERSION}" VERSION_LESS "5.2") # Mathematica 5.2 added support for 64-bit list (APPEND ${_outSystemIDs} ${_systemID}) endif() else() list (APPEND ${_outSystemIDs} ${_systemID}) endif() # Linux 64-bit can run x86 through ia32-libs package if (Mathematica_VERSION) if ("${Mathematica_VERSION}" VERSION_LESS "11.3") # Mathematica 11.3 dropped support for 32-bit Linux list (APPEND ${_outSystemIDs} "Linux") endif() else() list (APPEND ${_outSystemIDs} "Linux") endif() else() list (APPEND ${_outSystemIDs} ${_systemID}) endif() list (REMOVE_DUPLICATES ${_outSystemIDs}) endmacro() # internal macro to compute target MathLink / WSTP DeveloperKit system ID macro(_get_developer_kit_system_IDs _outSystemIDs) if (APPLE) if (Mathematica_VERSION) if ("${Mathematica_VERSION}" VERSION_LESS "9.0") # Mathematica versions before 9 did not have a system ID subdirectory set (${_outSystemIDs} "") else() # Mathematica versions after 9 have a system ID subdirectory set (${_outSystemIDs} "MacOSX-x86-64" "MacOSX-ARM64") 
      endif()
    else()
      _get_system_IDs(${_outSystemIDs})
    endif()
  else()
    _get_system_IDs(${_outSystemIDs})
  endif()
endmacro()

# internal macro to compute host MathLink / WSTP DeveloperKit system ID
# _outSystemIDs: name of output variable receiving the list of system ID
# sub-directory names to search below the DeveloperKit directory
macro(_get_host_developer_kit_system_IDs _outSystemIDs)
  if (CMAKE_HOST_APPLE)
    if (Mathematica_VERSION)
      # Mathematica versions before 9 did not have a system ID subdirectory
      if ("${Mathematica_VERSION}" VERSION_LESS "9.0")
        set (${_outSystemIDs} "")
      else()
        # Mathematica versions after 9 have a system ID subdirectory
        set (${_outSystemIDs} "MacOSX-x86-64" "MacOSX-ARM64")
      endif()
    else()
      # version unknown: fall back to generic host system IDs
      _get_host_system_IDs(${_outSystemIDs})
    endif()
  else()
    _get_host_system_IDs(${_outSystemIDs})
  endif()
endmacro()

# internal macro to compute target development flavor
# (flavor is a sub-directory below CompilerAdditions, e.g. "mldev64")
macro (_get_target_flavor _outFlavor)
  if (CYGWIN)
    set (${_outFlavor} "cygwin")
  elseif (WIN32)
    if (CMAKE_SIZEOF_VOID_P EQUAL 8)
      set (${_outFlavor} "mldev64")
    else()
      set (${_outFlavor} "mldev32")
    endif()
  elseif (APPLE)
    set (${_outFlavor} "")
    if (Mathematica_VERSION)
      if (Mathematica_USE_LIBCXX_LIBRARIES AND NOT "${Mathematica_VERSION}" VERSION_LESS "10.0" AND "${Mathematica_VERSION}" VERSION_LESS "10.4")
        # Mathematica 10 added LLVM libc++ compiled version in AlternativeLibraries directory
        # Mathematica 10.4 and later only ship with LLVM libc++ compiled version
        set (${_outFlavor} "AlternativeLibraries")
      endif()
    endif()
  else()
    # no flavors on non-Windows platforms
    set (${_outFlavor} "")
  endif()
endmacro()

# internal macro to compute host development flavor
macro (_get_host_flavor _outFlavor)
  if (CYGWIN)
    set (${_outFlavor} "cygwin")
  elseif (CMAKE_HOST_WIN32)
    set (${_outFlavor} "mldev32")
    if (DEFINED ENV{PROCESSOR_ARCHITEW6432})
      if ("$ENV{PROCESSOR_ARCHITEW6432}" STREQUAL "AMD64")
        # running on WoW64, host is native 64-bit Windows
        set (${_outFlavor} "mldev64")
      endif()
    elseif (DEFINED ENV{PROCESSOR_ARCHITECTURE})
      if ("$ENV{PROCESSOR_ARCHITECTURE}" STREQUAL "AMD64")
        # host is native 64-bit Windows
        set (${_outFlavor} "mldev64")
      endif()
    endif()
  elseif (CMAKE_HOST_APPLE)
    set (${_outFlavor} "")
    if (Mathematica_VERSION)
      if (Mathematica_USE_LIBCXX_LIBRARIES AND NOT "${Mathematica_VERSION}" VERSION_LESS "10.0" AND "${Mathematica_VERSION}" VERSION_LESS "10.4")
        # Mathematica 10 added LLVM libc++ compiled version in AlternativeLibraries directory
        # Mathematica 10.4 and later only ship with LLVM libc++ compiled version
        set (${_outFlavor} "AlternativeLibraries")
      endif()
    endif()
  else()
    # no flavors on non-Windows platforms
    set (${_outFlavor} "")
  endif()
endmacro()

# internal macro to compute WolframRTL library names
macro (_get_wolfram_runtime_library_names _outLibraryNames)
  if (Mathematica_USE_STATIC_LIBRARIES)
    set (${_outLibraryNames} "WolframRTL_Static_Minimal" )
  else()
    if (Mathematica_USE_MINIMAL_LIBRARIES)
      set (${_outLibraryNames} "WolframRTL_Minimal" )
    else()
      set (${_outLibraryNames} "WolframRTL" )
    endif()
  endif()
endmacro()

# internal macro to compute MathLink library names
# names encode pointer width ("32"/"64"), interface version ("i4" etc.)
# and linkage ("s" static / "m" dynamic import library)
macro (_get_mathlink_library_names _outLibraryNames)
  if (CYGWIN)
    if (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR)
      set (${_outLibraryNames} "ML32i${Mathematica_MathLink_FIND_VERSION_MAJOR}")
    else()
      set (${_outLibraryNames} "ML32i4" "ML32i3" "ML32i2" "ML32i1")
    endif()
  elseif (WIN32)
    if (CMAKE_SIZEOF_VOID_P EQUAL 8)
      if (BORLAND)
        set (${_outLibraryNames} "ml64i3b" "ml64i2b")
      elseif (WATCOM)
        set (${_outLibraryNames} "ml64i3w" "ml64i2w")
      endif()
      # always add default Microsoft 64-bit PE libraries
      if (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR)
        if (Mathematica_USE_STATIC_LIBRARIES)
          list (APPEND ${_outLibraryNames} "ml64i${Mathematica_MathLink_FIND_VERSION_MAJOR}s")
        else()
          list (APPEND ${_outLibraryNames} "ml64i${Mathematica_MathLink_FIND_VERSION_MAJOR}m")
        endif()
      else()
        if (Mathematica_USE_STATIC_LIBRARIES)
          list (APPEND ${_outLibraryNames} "ml64i4s" "ml64i3s")
        else()
          list (APPEND ${_outLibraryNames} "ml64i4m" "ml64i3m" "ml64i2m")
        endif()
      endif()
    else()
      if (BORLAND)
        set (${_outLibraryNames} "ml32i3b" "ml32i2b" "ml32i1b")
      elseif (WATCOM)
        set (${_outLibraryNames} "ml32i3w" "ml32i2w" "ml32i1w")
      endif()
      # always add default Microsoft 32-bit PE libraries
      if (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR)
        if (Mathematica_USE_STATIC_LIBRARIES)
          list (APPEND ${_outLibraryNames} "ml32i${Mathematica_MathLink_FIND_VERSION_MAJOR}s")
        else()
          list (APPEND ${_outLibraryNames} "ml32i${Mathematica_MathLink_FIND_VERSION_MAJOR}m")
        endif()
      else()
        if (Mathematica_USE_STATIC_LIBRARIES)
          list (APPEND ${_outLibraryNames} "ml32i4s" "ml32i3s")
        else()
          list (APPEND ${_outLibraryNames} "ml32i4m" "ml32i3m" "ml32i2m" "ml32i1m")
        endif()
      endif()
    endif()
  elseif (APPLE)
    if (Mathematica_USE_STATIC_LIBRARIES)
      if (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR AND DEFINED Mathematica_MathLink_FIND_VERSION_MINOR)
        set (${_outLibraryNames} "libMLi${Mathematica_MathLink_FIND_VERSION_MAJOR}.${Mathematica_MathLink_FIND_VERSION_MINOR}.a")
      elseif (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR)
        set (${_outLibraryNames} "libMLi${Mathematica_MathLink_FIND_VERSION_MAJOR}.a")
      else()
        set (${_outLibraryNames} "libMLi4.a" "libMLi3.a" "libML.a")
      endif()
    else()
      # search for mathlink.framework
      set (${_outLibraryNames} "mathlink" "ML")
    endif()
  elseif (UNIX)
    if (Mathematica_USE_STATIC_LIBRARIES)
      set (_ext ".a")
    else()
      set (_ext ".so")
    endif()
    if (CMAKE_SIZEOF_VOID_P EQUAL 8)
      set (_arch "64")
    else()
      set (_arch "32")
    endif()
    if (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR)
      set (${_outLibraryNames} "libML${_arch}i${Mathematica_MathLink_FIND_VERSION_MAJOR}${_ext}")
    else()
      set (${_outLibraryNames} "libML${_arch}i4${_ext}" "libML${_arch}i3${_ext}" "libML${_ext}")
    endif()
  endif()
endmacro(_get_mathlink_library_names)

# internal function to compute the C source file name that mprep
# generates for a given MathLink template file (.tm/.tmpp/.tm++/.tmxx)
function (_get_mprep_output_file _templateFile _outfile)
  get_filename_component(_templateFile_name ${_templateFile} NAME)
  get_filename_component(_templateFile_ext "${_templateFile}" EXT)
  if (_templateFile_ext STREQUAL ".tmpp")
    set (${_outfile} "${_templateFile_name}.cpp" PARENT_SCOPE)
  elseif (_templateFile_ext STREQUAL ".tm++")
    set (${_outfile} "${_templateFile_name}.c++" PARENT_SCOPE)
  elseif (_templateFile_ext STREQUAL ".tmxx")
    set (${_outfile} "${_templateFile_name}.cxx" PARENT_SCOPE)
  else()
    # default: plain C output
    set (${_outfile} "${_templateFile_name}.c" PARENT_SCOPE)
  endif()
endfunction()

# internal macro to compute WSTP library names
# (same naming scheme as MathLink, with "WSTP"/"wstp" prefixes)
macro (_get_WSTP_library_names _outLibraryNames)
  if (CYGWIN)
    if (DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR)
      set (${_outLibraryNames} "WSTP32i${Mathematica_WSTP_FIND_VERSION_MAJOR}")
    else()
      set (${_outLibraryNames} "WSTP32i4" "WSTP32i3" "WSTP32i2" "WSTP32i1")
    endif()
  elseif (WIN32)
    if (CMAKE_SIZEOF_VOID_P EQUAL 8)
      set (_arch "64")
    else()
      set (_arch "32")
    endif()
    if (DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR)
      if (Mathematica_USE_STATIC_LIBRARIES)
        list (APPEND ${_outLibraryNames} "wstp${_arch}i${Mathematica_WSTP_FIND_VERSION_MAJOR}s")
      else()
        list (APPEND ${_outLibraryNames} "wstp${_arch}i${Mathematica_WSTP_FIND_VERSION_MAJOR}m")
      endif()
    else()
      if (Mathematica_USE_STATIC_LIBRARIES)
        list (APPEND ${_outLibraryNames} "wstp${_arch}i4s" "wstp${_arch}i3s")
      else()
        list (APPEND ${_outLibraryNames} "wstp${_arch}i4m" "wstp${_arch}i3m" "wstp${_arch}i2m" "wstp${_arch}i1m")
      endif()
    endif()
  elseif (APPLE)
    if (Mathematica_USE_STATIC_LIBRARIES)
      if (DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR AND DEFINED Mathematica_WSTP_FIND_VERSION_MINOR)
        set (${_outLibraryNames} "libWSTPi${Mathematica_WSTP_FIND_VERSION_MAJOR}.${Mathematica_WSTP_FIND_VERSION_MINOR}.a")
      elseif (DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR)
        set (${_outLibraryNames} "libWSTPi${Mathematica_WSTP_FIND_VERSION_MAJOR}.a")
      else()
        set (${_outLibraryNames} "libWSTPi4.a" "libWSTPi3.a")
      endif()
    else()
      # search for wstp.framework
      set (${_outLibraryNames} "wstp")
    endif()
  elseif (UNIX)
    if (Mathematica_USE_STATIC_LIBRARIES)
      set (_ext ".a")
    else()
      set (_ext ".so")
    endif()
    if (CMAKE_SIZEOF_VOID_P EQUAL 8)
      set (_arch "64")
    else()
      set (_arch "32")
    endif()
    if (DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR)
      set (${_outLibraryNames} "libWSTP${_arch}i${Mathematica_WSTP_FIND_VERSION_MAJOR}${_ext}")
    else()
      set (${_outLibraryNames} "libWSTP${_arch}i4${_ext}" "libWSTP${_arch}i3${_ext}")
    endif()
  endif()
endmacro(_get_WSTP_library_names)

# internal macro to compute Java launcher name
macro (_get_jlink_java_name _outExecutabeName)
  if (CMAKE_HOST_WIN32)
    set (${_outExecutabeName} "java.exe")
  elseif (CMAKE_HOST_UNIX)
    set (${_outExecutabeName} "java")
  endif()
endmacro()

# internal macro to compute required WolframRTL system libraries
macro (_append_wolframlibrary_needed_system_libraries _outLibraries)
  if (UNIX)
    if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
      list (APPEND ${_outLibraries} pthread m )
    endif()
  endif()
endmacro()

# internal macro to compute required MathLink system libraries
# appends to the list variable named by _outLibraries
macro (_append_mathlink_needed_system_libraries _outLibraries)
  if (APPLE)
    if (DEFINED Mathematica_MathLink_VERSION_MINOR)
      if ("${Mathematica_MathLink_VERSION_MINOR}" GREATER 18)
        # OS X MathLink API revision >= 19 has dependency on C++ standard library
        if (Mathematica_USE_LIBCXX_LIBRARIES)
          # LLVM libc++
          list (APPEND ${_outLibraries} c++ )
        else()
          # GNU libstdc++
          list (APPEND ${_outLibraries} stdc++ )
        endif()
      endif()
      if ("${Mathematica_MathLink_VERSION_MINOR}" GREATER 20)
        # Mac OS X MathLink API revision >= 21 needs the Foundation framework
        list (APPEND ${_outLibraries} "-framework Foundation" )
      endif()
    endif()
  elseif (UNIX)
    if (DEFINED Mathematica_MathLink_VERSION_MINOR)
      if ("${Mathematica_MathLink_VERSION_MINOR}" GREATER 18)
        # UNIX MathLink API revision >= 19 has dependency on GNU libstdc++
        list (APPEND ${_outLibraries} stdc++ )
      endif()
    endif()
    if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
      list (APPEND ${_outLibraries} m)
      set (CMAKE_THREAD_PREFER_PTHREAD TRUE)
      find_package(Threads REQUIRED)
      list (APPEND ${_outLibraries} "${CMAKE_THREAD_LIBS_INIT}")
      find_library(Mathematica_rt_LIBRARY rt)
      mark_as_advanced(Mathematica_rt_LIBRARY)
      list (APPEND ${_outLibraries} ${Mathematica_rt_LIBRARY})
      if (DEFINED
          Mathematica_MathLink_VERSION_MINOR)
        if ("${Mathematica_MathLink_VERSION_MINOR}" GREATER 24)
          # Linux MathLink API revision >= 25 has dependency on libdl and libuuid
          list (APPEND ${_outLibraries} ${CMAKE_DL_LIBS})
          find_library (Mathematica_uuid_LIBRARY uuid)
          mark_as_advanced(Mathematica_uuid_LIBRARY)
          list (APPEND ${_outLibraries} ${Mathematica_uuid_LIBRARY})
        endif()
      endif()
    elseif (CMAKE_SYSTEM_NAME STREQUAL "SunOS")
      list (APPEND ${_outLibraries} m socket nsl rt )
    elseif (CMAKE_SYSTEM_NAME STREQUAL "AIX")
      list (APPEND ${_outLibraries} m pthread )
    elseif (CMAKE_SYSTEM_NAME STREQUAL "HP-UX")
      list (APPEND ${_outLibraries} m /usr/lib/pa20_64/libdld.sl /usr/lib/pa20_64/libm.a pthread rt )
    elseif (CMAKE_SYSTEM_NAME STREQUAL "IRIX")
      list (APPEND ${_outLibraries} m pthread )
    endif()
  elseif (WIN32)
    if (DEFINED Mathematica_MathLink_VERSION_MINOR)
      if ("${Mathematica_MathLink_VERSION_MINOR}" GREATER 19)
        # Windows MathLink API revision >= 20 has dependency on Winsock 2
        list (APPEND ${_outLibraries} Ws2_32.lib )
      endif()
      if ("${Mathematica_MathLink_VERSION_MINOR}" GREATER 24)
        # Windows MathLink API interface >= 25 has dependency on RPC
        list (APPEND ${_outLibraries} Rpcrt4.lib )
      endif()
    endif()
  endif()
endmacro()

# internal macro to compute required WSTP system libraries
# appends to the list variable named by _outLibraries
macro (_append_WSTP_needed_system_libraries _outLibraries)
  if (APPLE)
    if (DEFINED Mathematica_WSTP_VERSION_MINOR)
      if ("${Mathematica_WSTP_VERSION_MINOR}" GREATER 18)
        # OS X WSTP API revision >= 19 has dependency on C++ standard library
        if (Mathematica_USE_LIBCXX_LIBRARIES)
          # LLVM libc++
          list (APPEND ${_outLibraries} c++ )
        else()
          # GNU libstdc++
          list (APPEND ${_outLibraries} stdc++ )
        endif()
      endif()
      if ("${Mathematica_WSTP_VERSION_MINOR}" GREATER 20)
        # Mac OS X WSTP API revision >= 21 needs the Foundation framework
        list (APPEND ${_outLibraries} "-framework Foundation" )
      endif()
    endif()
  elseif (UNIX)
    if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
      if (DEFINED Mathematica_WSTP_VERSION_MINOR)
        if ("${Mathematica_WSTP_VERSION_MINOR}" GREATER 18)
          # UNIX WSTP API revision >= 19 has dependency on GNU libstdc++
          list (APPEND ${_outLibraries} stdc++ )
        endif()
      endif()
      list (APPEND ${_outLibraries} m pthread rt )
      if (DEFINED Mathematica_WSTP_VERSION_MINOR)
        if ("${Mathematica_WSTP_VERSION_MINOR}" GREATER 24)
          # Linux WSTP API revision >= 25 has dependency on libdl and libuuid
          list (APPEND ${_outLibraries} ${CMAKE_DL_LIBS})
          find_library (Mathematica_uuid_LIBRARY uuid)
          mark_as_advanced(Mathematica_uuid_LIBRARY)
          list (APPEND ${_outLibraries} ${Mathematica_uuid_LIBRARY})
        endif()
      endif()
    endif()
  elseif (WIN32)
    if (DEFINED Mathematica_WSTP_VERSION_MINOR)
      if ("${Mathematica_WSTP_VERSION_MINOR}" GREATER 19)
        # Windows WSTP API revision >= 20 has dependency on Winsock 2
        list (APPEND ${_outLibraries} Ws2_32.lib )
      endif()
      if ("${Mathematica_WSTP_VERSION_MINOR}" GREATER 24)
        # Windows WSTP API interface >= 25 has dependency on RPC
        list (APPEND ${_outLibraries} Rpcrt4.lib )
      endif()
    endif()
  endif()
endmacro()

# internal macro to return dynamic library search path environment variables on host platform
macro (_get_host_library_search_path_envvars _outVariableNames)
  set (${_outVariableNames} "")
  if (CMAKE_HOST_APPLE)
    list (APPEND ${_outVariableNames} "DYLD_FRAMEWORK_PATH" "DYLD_LIBRARY_PATH")
  elseif (CYGWIN)
    list (APPEND ${_outVariableNames} "PATH" "LD_LIBRARY_PATH")
  elseif (CMAKE_HOST_WIN32)
    list (APPEND ${_outVariableNames} "PATH")
  elseif (CMAKE_HOST_UNIX)
    if ("${CMAKE_HOST_SYSTEM_NAME}" STREQUAL "SunOS")
      list (APPEND ${_outVariableNames} "LD_LIBRARY_PATH_64")
    elseif ("${CMAKE_HOST_SYSTEM_NAME}" STREQUAL "AIX")
      list (APPEND ${_outVariableNames} "LIBPATH")
    elseif ("${CMAKE_HOST_SYSTEM_NAME}" STREQUAL "HP-UX")
      list (APPEND ${_outVariableNames} "SHLIB_PATH")
    elseif ("${CMAKE_HOST_SYSTEM_NAME}" STREQUAL "IRIX")
      list (APPEND ${_outVariableNames} "LD_LIBRARY64_PATH")
    endif()
    list (APPEND ${_outVariableNames} "LD_LIBRARY_PATH")
  endif()
endmacro()

# internal function to convert list to a search path list for host platform
# (joins native-converted paths with the platform's path separator)
function (_to_native_path_list _outPathList)
  set (_nativePathList "")
  foreach (_path ${ARGN})
    _to_native_path("${_path}" _nativePath)
    list (APPEND _nativePathList "${_nativePath}")
  endforeach()
  if (CMAKE_HOST_UNIX)
    string (REPLACE ";" ":" _nativePathList "${_nativePathList}")
  elseif (CMAKE_HOST_WIN32)
    # prevent CMake from interpreting ; as a list separator
    string (REPLACE ";" "\\;" _nativePathList "${_nativePathList}")
  endif()
  set (${_outPathList} "${_nativePathList}" PARENT_SCOPE)
endfunction()

# internal function to convert list to a search path list of CMake-style paths
function (_to_cmake_path_list _outPathList)
  set (_cmakePathList "")
  foreach (_path ${ARGN})
    _to_cmake_path("${_path}" _cmakePath)
    list (APPEND _cmakePathList "${_cmakePath}")
  endforeach()
  if (CMAKE_HOST_UNIX)
    string (REPLACE ";" ":" _cmakePathList "${_cmakePathList}")
  elseif (CMAKE_HOST_WIN32)
    # prevent CMake from interpreting ; as a list separator
    string (REPLACE ";" "\\;" _cmakePathList "${_cmakePathList}")
  endif()
  set (${_outPathList} "${_cmakePathList}" PARENT_SCOPE)
endfunction()

# internal macro to select runtime libraries according to build type
macro (_select_configuration_run_time_dirs _outRuntimeDirs)
  set (${_outRuntimeDirs} ${Mathematica_RUNTIME_LIBRARY_DIRS})
  if (DEFINED CMAKE_BUILD_TYPE)
    if ("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
      set (${_outRuntimeDirs} ${Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG})
    endif()
  endif()
endmacro()

# internal macro to set up Mathematica host system IDs
macro (_setup_mathematica_systemIDs)
  _get_system_IDs(Mathematica_SYSTEM_IDS)
  # default target platform system ID is first one in Mathematica_SYSTEM_IDS
  list(GET Mathematica_SYSTEM_IDS 0 Mathematica_SYSTEM_ID)
  if (Mathematica_RUN_KERNEL_ON_CONFIGURE AND COMMAND Mathematica_EXECUTE)
    # determine true host system ID which depends on both Mathematica version
    # and OS variant by running Mathematica kernel
    Mathematica_EXECUTE(
      CODE "Print[StandardForm[$SystemID]]"
      OUTPUT_VARIABLE Mathematica_KERNEL_HOST_SYSTEM_ID
      CACHE DOC "Actual Mathematica host system ID."
TIMEOUT 10) if (NOT Mathematica_KERNEL_HOST_SYSTEM_ID) message (WARNING "Cannot accurately determine Mathematica host system ID.") endif() endif() if (Mathematica_KERNEL_HOST_SYSTEM_ID) if (Mathematica_KERNEL_HOST_SYSTEM_ID MATCHES "[a-zA-Z0-9_-]+") set (Mathematica_HOST_SYSTEM_ID "${Mathematica_KERNEL_HOST_SYSTEM_ID}") else() unset (Mathematica_KERNEL_HOST_SYSTEM_ID CACHE) endif() else() # guess host system ID from the environment _get_host_system_IDs(_HostSystemIDs) # default to first ID in _HostSystemIDs list (GET _HostSystemIDs 0 Mathematica_HOST_SYSTEM_ID) endif() _get_compatible_system_IDs(${Mathematica_HOST_SYSTEM_ID} Mathematica_HOST_SYSTEM_IDS) endmacro() # internal macro to set up Mathematica creation ID macro (_setup_mathematica_creationID) if (DEFINED Mathematica_ROOT_DIR) if (EXISTS "${Mathematica_ROOT_DIR}/.CreationID") # parse hidden CreationID file file (STRINGS "${Mathematica_ROOT_DIR}/.CreationID" Mathematica_CREATION_ID REGEX "[0-9]+") elseif (CMAKE_HOST_APPLE AND EXISTS "${Mathematica_ROOT_DIR}/Contents/Info.plist") execute_process( COMMAND "grep" "--after-context=1" "CFBundleShortVersionString" "${Mathematica_ROOT_DIR}/Contents/Info.plist" TIMEOUT 10 OUTPUT_VARIABLE _versionStr ERROR_QUIET) if (_versionStr MATCHES "\\.([0-9]+)") # OS X Info.plist CFBundleShortVersionString has Creation ID as last version component set (Mathematica_CREATION_ID "${CMAKE_MATCH_1}") else() set (_versionLine "") endif() endif() endif() if (NOT DEFINED Mathematica_CREATION_ID AND DEFINED Mathematica_CREATION_ID_LAST) set (Mathematica_CREATION_ID ${Mathematica_CREATION_ID_LAST}) endif() endmacro() # internal macro to set up Mathematica base directory variable macro (_setup_mathematica_base_directory) if (Mathematica_RUN_KERNEL_ON_CONFIGURE AND COMMAND Mathematica_EXECUTE) # determine true $BaseDirectory Mathematica_EXECUTE( CODE "Print[StandardForm[$BaseDirectory]]" OUTPUT_VARIABLE Mathematica_KERNEL_BASE_DIR CACHE DOC "Actual Mathematica $BaseDirectory." 
TIMEOUT 10) if (NOT Mathematica_KERNEL_BASE_DIR) message (WARNING "Cannot accurately determine Mathematica $BaseDirectory.") endif() endif() if (Mathematica_KERNEL_BASE_DIR) if (IS_ABSOLUTE "${Mathematica_KERNEL_BASE_DIR}") set (Mathematica_BASE_DIR "${Mathematica_KERNEL_BASE_DIR}") else() unset (Mathematica_KERNEL_BASE_DIR CACHE) endif() else () # guess Mathematica_BASE_DIR from environment # environment variable MATHEMATICA_BASE may override default # $BaseDirectory, see # https://reference.wolfram.com/language/tutorial/ConfigurationFiles.html if (DEFINED ENV{MATHEMATICA_BASE}) set (Mathematica_BASE_DIR "$ENV{MATHEMATICA_BASE}") elseif (CMAKE_HOST_WIN32 OR CYGWIN) if (DEFINED $ENV{PROGRAMDATA}) set (Mathematica_BASE_DIR "$ENV{PROGRAMDATA}\\Mathematica") elseif (DEFINED ENV{ALLUSERSAPPDATA}) set (Mathematica_BASE_DIR "$ENV{ALLUSERSAPPDATA}\\Mathematica") elseif (DEFINED ENV{USERPROFILE} AND DEFINED ENV{ALLUSERSPROFILE} AND DEFINED ENV{APPDATA}) string (REPLACE "$ENV{USERPROFILE}" "$ENV{ALLUSERSPROFILE}" Mathematica_BASE_DIR "$ENV{APPDATA}\\Mathematica") endif() elseif (CMAKE_HOST_APPLE) set (Mathematica_BASE_DIR "/Library/Mathematica") elseif (CMAKE_HOST_UNIX) set (Mathematica_BASE_DIR "/usr/share/Mathematica") endif() endif() if (Mathematica_BASE_DIR) get_filename_component(Mathematica_BASE_DIR "${Mathematica_BASE_DIR}" ABSOLUTE) _to_cmake_path("${Mathematica_BASE_DIR}" Mathematica_BASE_DIR) else() set (Mathematica_BASE_DIR "Mathematica_BASE_DIR-NOTFOUND") message (WARNING "Cannot determine Mathematica base directory.") endif() endmacro() # internal macro to set up Mathematica user base directory variable macro (_setup_mathematica_userbase_directory) if (Mathematica_RUN_KERNEL_ON_CONFIGURE AND COMMAND Mathematica_EXECUTE) # determine true $UserBaseDirectory Mathematica_EXECUTE( CODE "Print[StandardForm[$UserBaseDirectory]]" OUTPUT_VARIABLE Mathematica_KERNEL_USERBASE_DIR CACHE DOC "Actual Mathematica $UserBaseDirectory." 
      TIMEOUT 10)
    if (NOT Mathematica_KERNEL_USERBASE_DIR)
      message (WARNING "Cannot accurately determine Mathematica $UserBaseDirectory.")
    endif()
  endif()
  if (Mathematica_KERNEL_USERBASE_DIR)
    if (IS_ABSOLUTE "${Mathematica_KERNEL_USERBASE_DIR}")
      set (Mathematica_USERBASE_DIR "${Mathematica_KERNEL_USERBASE_DIR}")
    else()
      # kernel output is not a usable path; drop the cached value
      unset (Mathematica_KERNEL_USERBASE_DIR CACHE)
    endif()
  else ()
    # guess Mathematica_USERBASE_DIR from environment
    # environment variable MATHEMATICA_USERBASE may override default
    # $UserBaseDirectory, see
    # https://reference.wolfram.com/language/tutorial/ConfigurationFiles.html
    if (DEFINED ENV{MATHEMATICA_USERBASE})
      set (Mathematica_USERBASE_DIR "$ENV{MATHEMATICA_USERBASE}")
    elseif (CMAKE_HOST_WIN32 OR CYGWIN)
      if (DEFINED ENV{APPDATA})
        set (Mathematica_USERBASE_DIR "$ENV{APPDATA}\\Mathematica")
      endif()
    elseif (CMAKE_HOST_APPLE)
      if (DEFINED ENV{HOME})
        set (Mathematica_USERBASE_DIR "$ENV{HOME}/Library/Mathematica")
      endif()
    elseif (CMAKE_HOST_UNIX)
      if (DEFINED ENV{HOME})
        set (Mathematica_USERBASE_DIR "$ENV{HOME}/.Mathematica")
      endif()
    endif()
  endif()
  if (Mathematica_USERBASE_DIR)
    get_filename_component(Mathematica_USERBASE_DIR "${Mathematica_USERBASE_DIR}" ABSOLUTE)
    _to_cmake_path("${Mathematica_USERBASE_DIR}" Mathematica_USERBASE_DIR)
  else()
    set (Mathematica_USERBASE_DIR "Mathematica_USERBASE_DIR-NOTFOUND")
    message (WARNING "Cannot determine Mathematica user base directory.")
  endif()
endmacro()

# internal macro to setup FindMathematica option variables
# each option honors a pre-set Mathematica_*_INIT or Mathematica_* value
macro (_setup_findmathematica_options)
  if (NOT DEFINED Mathematica_USE_STATIC_LIBRARIES_INIT)
    if (DEFINED Mathematica_USE_STATIC_LIBRARIES)
      set (Mathematica_USE_STATIC_LIBRARIES_INIT ${Mathematica_USE_STATIC_LIBRARIES})
    else()
      set (Mathematica_USE_STATIC_LIBRARIES_INIT FALSE)
    endif()
  endif()
  option (Mathematica_USE_STATIC_LIBRARIES
    "prefer static Mathematica libraries to dynamic libraries?"
    ${Mathematica_USE_STATIC_LIBRARIES_INIT})
  if (NOT DEFINED Mathematica_USE_MINIMAL_LIBRARIES_INIT)
    if (DEFINED Mathematica_USE_MINIMAL_LIBRARIES)
      set (Mathematica_USE_MINIMAL_LIBRARIES_INIT ${Mathematica_USE_MINIMAL_LIBRARIES})
    else()
      set (Mathematica_USE_MINIMAL_LIBRARIES_INIT FALSE)
    endif()
  endif()
  option (Mathematica_USE_MINIMAL_LIBRARIES
    "prefer minimal Mathematica libraries to full libraries?"
    ${Mathematica_USE_MINIMAL_LIBRARIES_INIT})
  if (NOT DEFINED Mathematica_USE_LIBCXX_LIBRARIES_INIT)
    if (DEFINED Mathematica_USE_LIBCXX_LIBRARIES)
      set (Mathematica_USE_LIBCXX_LIBRARIES_INIT ${Mathematica_USE_LIBCXX_LIBRARIES})
    else()
      # starting with OS X 10.9, Clang uses libc++ by default
      if (APPLE AND NOT "${CMAKE_SYSTEM_VERSION}" VERSION_LESS "13.0.0" AND "${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
        set (Mathematica_USE_LIBCXX_LIBRARIES_INIT TRUE)
      else()
        set (Mathematica_USE_LIBCXX_LIBRARIES_INIT FALSE)
      endif()
    endif()
  endif()
  option (Mathematica_USE_LIBCXX_LIBRARIES
    "prefer Mathematica libraries linked with LLVM libc++ to those linked with GNU libstdc++?"
    ${Mathematica_USE_LIBCXX_LIBRARIES_INIT})
  if (NOT DEFINED Mathematica_DEBUG_INIT)
    if (DEFINED Mathematica_DEBUG)
      set (Mathematica_DEBUG_INIT ${Mathematica_DEBUG})
    else()
      set (Mathematica_DEBUG_INIT FALSE)
    endif()
  endif()
  option (Mathematica_DEBUG
    "enable FindMathematica debugging output?"
    ${Mathematica_DEBUG_INIT})
  if (NOT DEFINED Mathematica_RUN_KERNEL_ON_CONFIGURE_INIT)
    if (DEFINED Mathematica_RUN_KERNEL_ON_CONFIGURE)
      set (Mathematica_RUN_KERNEL_ON_CONFIGURE_INIT ${Mathematica_RUN_KERNEL_ON_CONFIGURE})
    else()
      set (Mathematica_RUN_KERNEL_ON_CONFIGURE_INIT TRUE)
    endif()
  endif()
  option (Mathematica_RUN_KERNEL_ON_CONFIGURE
    "allow FindMathematica to implicitly run the Mathematica kernel at CMake configure time?"
    ${Mathematica_RUN_KERNEL_ON_CONFIGURE_INIT})
endmacro()

# internal macro to find Mathematica installation
macro (_find_mathematica)
  _get_host_frontend_names(_FrontEndExecutables)
  _get_host_kernel_names(_KernelExecutables)
  if (Mathematica_DEBUG)
    message (STATUS "FrontEndExecutables ${_FrontEndExecutables}")
    message (STATUS "KernelExecutables ${_KernelExecutables}")
  endif()
  set (_helpStr "Mathematica host installation root directory.")
  # only search when the cache variable is unset or points to a stale path
  if (NOT DEFINED Mathematica_HOST_ROOT_DIR)
    set (_doSearch TRUE)
  elseif (NOT EXISTS "${Mathematica_HOST_ROOT_DIR}")
    set (_doSearch TRUE)
  else()
    set (_doSearch FALSE)
  endif()
  if (_doSearch)
    _get_search_paths(_SearchPaths)
    _get_program_names(_ProgramNames)
    if (Mathematica_DEBUG)
      message (STATUS "SearchPaths ${_SearchPaths}")
      message (STATUS "ProgramNames ${_ProgramNames}")
      message (STATUS "KernelExecutables ${_KernelExecutables}")
    endif()
    find_path (Mathematica_HOST_ROOT_DIR
      NAMES ${_KernelExecutables}
      PATH_SUFFIXES ${_ProgramNames}
      PATHS ${_SearchPaths} ENV MATHEMATICA_HOME
      DOC "${_helpStr}"
      NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
    )
  else()
    # preserve pre-defined value, but set correct type and help string
    set_property(CACHE Mathematica_HOST_ROOT_DIR PROPERTY TYPE PATH)
    set_property(CACHE Mathematica_HOST_ROOT_DIR PROPERTY HELPSTRING "${_helpStr}")
  endif()
  # Mathematica_ROOT_DIR is initialized to Mathematica_HOST_ROOT_DIR by default
  # upon cross-compiling Mathematica_ROOT_DIR needs to be manually set to the correct
  # Mathematica installation folder for the target platform
  set (_helpStr "Mathematica target installation root directory.")
  if (NOT DEFINED Mathematica_ROOT_DIR)
    set (Mathematica_ROOT_DIR ${Mathematica_HOST_ROOT_DIR} CACHE PATH "${_helpStr}")
  elseif (NOT EXISTS "${Mathematica_ROOT_DIR}")
    set (Mathematica_ROOT_DIR ${Mathematica_HOST_ROOT_DIR} CACHE PATH "${_helpStr}")
  else()
    # preserve pre-defined value, but set correct type and help string
    set_property(CACHE Mathematica_ROOT_DIR PROPERTY TYPE PATH)
    set_property(CACHE
      Mathematica_ROOT_DIR PROPERTY HELPSTRING "${_helpStr}")
  endif()
  find_program (Mathematica_KERNEL_EXECUTABLE
    NAMES ${_KernelExecutables}
    HINTS ${Mathematica_HOST_ROOT_DIR}
    DOC "Mathematica kernel executable."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  find_program (Mathematica_FRONTEND_EXECUTABLE
    NAMES ${_FrontEndExecutables}
    HINTS ${Mathematica_HOST_ROOT_DIR}
    DOC "Mathematica front end executable."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  find_path (Mathematica_INCLUDE_DIR
    NAMES "mdefs.h"
    HINTS
      "${Mathematica_ROOT_DIR}/SystemFiles/IncludeFiles"
      "${Mathematica_ROOT_DIR}/Contents/SystemFiles/IncludeFiles"
      "${Mathematica_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/IncludeFiles"
    PATH_SUFFIXES "C"
    DOC "Mathematica C language definitions include directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  if (Mathematica_INCLUDE_DIR)
    set (Mathematica_INCLUDE_DIRS ${Mathematica_INCLUDE_DIR})
  else()
    set (Mathematica_INCLUDE_DIRS "")
  endif()
  # reset aggregate result variables; the _find_* macros below append to them
  set (Mathematica_LIBRARIES "")
  set (Mathematica_LIBRARY_DIRS "")
  set (Mathematica_RUNTIME_LIBRARY_DIRS "")
  set (Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG "")
endmacro(_find_mathematica)

# internal macro to init _LIBRARIES variable from given _LIBRARY variable
# _library_var/_libraries_var are the *names* of the input/output variables
macro (_setup_libraries_var _library_var _libraries_var)
  if (APPLE)
    # handle universal builds under Mac OS X
    # we need to add a library for each architecture
    _get_system_IDs(_SystemIDs)
    foreach (_systemID IN LISTS _SystemIDs)
      if ("${${_library_var}}" MATCHES "/${_systemID}/")
        set (_primarySystemID "${_systemID}")
      endif()
    endforeach()
    if (_primarySystemID)
      set (${_libraries_var} "")
      foreach (_systemID IN LISTS _SystemIDs)
        # substitute the system ID component of the library path
        string (REPLACE "/${_primarySystemID}/" "/${_systemID}/" _library "${${_library_var}}")
        if (EXISTS "${_library}")
          list (APPEND ${_libraries_var} "${_library}")
        endif()
      endforeach()
    else()
      set (${_libraries_var} ${${_library_var}})
    endif()
  else()
    set (${_libraries_var} ${${_library_var}})
  endif()
endmacro()

# internal macro to find Wolfram Library inside Mathematica installation
macro (_find_wolframlibrary)
  if (NOT DEFINED Mathematica_ROOT_DIR)
    _find_mathematica()
  endif()
  _get_system_IDs(_SystemIDs)
  _get_wolfram_runtime_library_names(_WolframRuntimeLibraryNames)
  if (Mathematica_DEBUG)
    message (STATUS "WolframLibrary SystemID ${_SystemIDs}")
    message (STATUS "WolframRuntimeLibraryNames ${_WolframRuntimeLibraryNames}")
  endif()
  set (_findLibraryPrefixesSave "${CMAKE_FIND_LIBRARY_PREFIXES}")
  set (_findLibrarySuffixesSave "${CMAKE_FIND_LIBRARY_SUFFIXES}")
  if (CYGWIN)
    # Wolfram RTL library names do not follow UNIX conventions under Cygwin
    set (CMAKE_FIND_LIBRARY_PREFIXES "")
    set (CMAKE_FIND_LIBRARY_SUFFIXES ".lib")
  endif()
  find_library (Mathematica_WolframLibrary_LIBRARY
    NAMES ${_WolframRuntimeLibraryNames}
    HINTS
      "${Mathematica_ROOT_DIR}/SystemFiles/Libraries"
      "${Mathematica_ROOT_DIR}/Contents/SystemFiles/Libraries"
      "${Mathematica_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Libraries"
    PATH_SUFFIXES ${_SystemIDs}
    DOC "Mathematica Wolfram Runtime Library."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  find_path (Mathematica_WolframLibrary_INCLUDE_DIR
    NAMES "WolframLibrary.h" "WolframRTL.h"
    HINTS
      "${Mathematica_ROOT_DIR}/SystemFiles/IncludeFiles"
      "${Mathematica_ROOT_DIR}/Contents/SystemFiles/IncludeFiles"
      "${Mathematica_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/IncludeFiles"
    PATH_SUFFIXES "C"
    DOC "Mathematica WolframLibrary include directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  if (Mathematica_WolframLibrary_INCLUDE_DIR)
    list (APPEND Mathematica_INCLUDE_DIRS ${Mathematica_WolframLibrary_INCLUDE_DIR})
  endif()
  # restore global find settings modified for the Cygwin special case
  set (CMAKE_FIND_LIBRARY_PREFIXES "${_findLibraryPrefixesSave}")
  set (CMAKE_FIND_LIBRARY_SUFFIXES "${_findLibrarySuffixesSave}")
endmacro()

# internal macro to find MathLink SDK inside Mathematica installation
macro (_find_mathlink)
  _get_developer_kit_system_IDs(_SystemIDs)
  _get_host_developer_kit_system_IDs(_HostSystemIDs)
  _get_target_flavor(_MathLinkFlavor)
  _get_host_flavor(_HostMathLinkFlavor)
  _get_mathlink_library_names(_MathLinkLibraryNames)
  if (NOT DEFINED Mathematica_ROOT_DIR OR NOT DEFINED Mathematica_HOST_ROOT_DIR)
    _find_mathematica()
  endif()
  if (Mathematica_DEBUG)
    message (STATUS "MathLink Target DeveloperKit SystemID ${_SystemIDs} ${_MathLinkFlavor}")
    message (STATUS "MathLink Host DeveloperKit SystemID ${_HostSystemIDs} ${_HostMathLinkFlavor}")
    message (STATUS "MathLink Library Names ${_MathLinkLibraryNames}")
  endif()
  find_path (Mathematica_MathLink_ROOT_DIR
    NAMES "CompilerAdditions"
    HINTS
      "${Mathematica_ROOT_DIR}/SystemFiles/Links/MathLink/DeveloperKit"
      "${Mathematica_ROOT_DIR}/Contents/SystemFiles/Links/MathLink/DeveloperKit"
      "${Mathematica_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Links/MathLink/DeveloperKit"
      "${Mathematica_ROOT_DIR}/AddOns/MathLink/DeveloperKit"
    PATH_SUFFIXES ${_SystemIDs}
    DOC "MathLink target SDK root directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  # prefer the flavor sub-directory (if any) over plain CompilerAdditions
  if (_MathLinkFlavor)
    set (_CompilerAdditions
      "${Mathematica_MathLink_ROOT_DIR}/CompilerAdditions/${_MathLinkFlavor}"
      "${Mathematica_MathLink_ROOT_DIR}/CompilerAdditions")
  else()
    set (_CompilerAdditions "${Mathematica_MathLink_ROOT_DIR}/CompilerAdditions")
  endif()
  find_path (Mathematica_MathLink_HOST_ROOT_DIR
    NAMES "CompilerAdditions"
    HINTS
      "${Mathematica_HOST_ROOT_DIR}/SystemFiles/Links/MathLink/DeveloperKit"
      "${Mathematica_HOST_ROOT_DIR}/Contents/SystemFiles/Links/MathLink/DeveloperKit"
      "${Mathematica_HOST_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Links/MathLink/DeveloperKit"
      "${Mathematica_HOST_ROOT_DIR}/AddOns/MathLink/DeveloperKit"
    PATH_SUFFIXES ${_HostSystemIDs}
    DOC "MathLink host SDK root directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  if (_HostMathLinkFlavor)
    set (_HostCompilerAdditions
      "${Mathematica_MathLink_HOST_ROOT_DIR}/CompilerAdditions/${_HostMathLinkFlavor}"
      "${Mathematica_MathLink_HOST_ROOT_DIR}/CompilerAdditions")
  else()
    set (_HostCompilerAdditions "${Mathematica_MathLink_HOST_ROOT_DIR}/CompilerAdditions")
  endif()
  if (Mathematica_DEBUG)
    message (STATUS "MathLink CompilerAdditions ${_CompilerAdditions}")
    message (STATUS "MathLink HostCompilerAdditions ${_HostCompilerAdditions}")
  endif()
  if (APPLE)
    # temporarily steer find_library towards/away from frameworks
    set (_findFrameWorkSave "${CMAKE_FIND_FRAMEWORK}")
    if (Mathematica_USE_STATIC_LIBRARIES)
      set (CMAKE_FIND_FRAMEWORK "LAST")
    else()
      set (CMAKE_FIND_FRAMEWORK "FIRST")
    endif()
  endif()
  find_program (Mathematica_MathLink_MPREP_EXECUTABLE
    NAMES "mprep"
    HINTS ${_HostCompilerAdditions}
    PATH_SUFFIXES "bin"
    DOC "MathLink template file preprocessor executable."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  find_library (Mathematica_MathLink_LIBRARY
    NAMES ${_MathLinkLibraryNames}
    HINTS ${_CompilerAdditions}
    PATH_SUFFIXES "lib"
    DOC "MathLink library to link against."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  find_path (Mathematica_MathLink_INCLUDE_DIR
    NAMES "mathlink.h"
    HINTS ${_CompilerAdditions}
    PATH_SUFFIXES "include"
    DOC "Path to the MathLink include directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  # a directory result means find_library picked up mathlink.framework;
  # resolve the requested version inside the framework bundle
  if (APPLE AND DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR AND IS_DIRECTORY "${Mathematica_MathLink_LIBRARY}")
    if (DEFINED Mathematica_MathLink_FIND_VERSION_MINOR)
      set (_frameworkVersionSubDir "${Mathematica_MathLink_LIBRARY}/Versions/${Mathematica_MathLink_FIND_VERSION_MAJOR}.${Mathematica_MathLink_FIND_VERSION_MINOR}")
    else()
      set (_frameworkVersionSubDir "${Mathematica_MathLink_LIBRARY}/Versions/${Mathematica_MathLink_FIND_VERSION_MAJOR}.[0-9]+")
    endif()
    file (GLOB _versionedLibrary "${_frameworkVersionSubDir}/mathlink")
    if (_versionedLibrary)
      # use last if there are multiple
      list (GET _versionedLibrary -1 _versionedLibrary)
      set (Mathematica_MathLink_LIBRARY "${_versionedLibrary}" CACHE FILEPATH "MathLink library to link against." FORCE)
    endif()
    file (GLOB _versionedHeaderDir "${_frameworkVersionSubDir}/Headers")
    if (_versionedHeaderDir)
      set (Mathematica_MathLink_INCLUDE_DIR "${_versionedHeaderDir}" CACHE FILEPATH "Path to the MathLink include directory." FORCE)
    endif()
  endif()
  find_path (Mathematica_MathLink_HOST_INCLUDE_DIR
    NAMES "mathlink.h"
    HINTS ${_HostCompilerAdditions}
    PATH_SUFFIXES "include"
    DOC "Path to the MathLink host include directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  if (APPLE)
    # restore global framework search mode
    set (CMAKE_FIND_FRAMEWORK "${_findFrameWorkSave}")
  endif()
  if (Mathematica_MathLink_INCLUDE_DIR)
    list (APPEND Mathematica_INCLUDE_DIRS ${Mathematica_MathLink_INCLUDE_DIR})
  endif()
endmacro(_find_mathlink)

# internal macro to find WSTP SDK inside Mathematica installation
# (mirrors _find_mathlink with WSTP naming)
macro (_find_WSTP)
  _get_developer_kit_system_IDs(_SystemIDs)
  _get_host_developer_kit_system_IDs(_HostSystemIDs)
  _get_target_flavor(_WSTPFlavor)
  _get_host_flavor(_HostWSTPFlavor)
  _get_WSTP_library_names(_WSTPLibraryNames)
  if (NOT DEFINED Mathematica_ROOT_DIR OR NOT DEFINED Mathematica_HOST_ROOT_DIR)
    _find_mathematica()
  endif()
  if (Mathematica_DEBUG)
    message (STATUS "WSTP Target DeveloperKit SystemID ${_SystemIDs} ${_WSTPFlavor}")
    message (STATUS "WSTP Host DeveloperKit SystemID ${_HostSystemIDs} ${_HostWSTPFlavor}")
    message (STATUS "WSTP Library Names ${_WSTPLibraryNames}")
  endif()
  find_path (Mathematica_WSTP_ROOT_DIR
    NAMES "CompilerAdditions"
    HINTS
      "${Mathematica_ROOT_DIR}/SystemFiles/Links/WSTP/DeveloperKit"
      "${Mathematica_ROOT_DIR}/Contents/SystemFiles/Links/WSTP/DeveloperKit"
      "${Mathematica_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Links/WSTP/DeveloperKit"
    PATH_SUFFIXES ${_SystemIDs}
    DOC "WSTP target SDK root directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  if (_WSTPFlavor)
    set (_CompilerAdditions
      "${Mathematica_WSTP_ROOT_DIR}/CompilerAdditions/${_WSTPFlavor}"
      "${Mathematica_WSTP_ROOT_DIR}/CompilerAdditions")
  else()
    set (_CompilerAdditions "${Mathematica_WSTP_ROOT_DIR}/CompilerAdditions")
  endif()
  find_path (Mathematica_WSTP_HOST_ROOT_DIR
    NAMES "CompilerAdditions"
    HINTS
      "${Mathematica_HOST_ROOT_DIR}/SystemFiles/Links/WSTP/DeveloperKit"
      "${Mathematica_HOST_ROOT_DIR}/Contents/SystemFiles/Links/WSTP/DeveloperKit"
      "${Mathematica_HOST_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Links/WSTP/DeveloperKit"
    PATH_SUFFIXES ${_HostSystemIDs}
    DOC "WSTP host SDK root directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  # prefer the host flavor sub-directory (if any) over plain CompilerAdditions
  if (_HostWSTPFlavor)
    set (_HostCompilerAdditions
      "${Mathematica_WSTP_HOST_ROOT_DIR}/CompilerAdditions/${_HostWSTPFlavor}"
      "${Mathematica_WSTP_HOST_ROOT_DIR}/CompilerAdditions")
  else()
    set (_HostCompilerAdditions "${Mathematica_WSTP_HOST_ROOT_DIR}/CompilerAdditions")
  endif()
  if (Mathematica_DEBUG)
    message (STATUS "WSTP CompilerAdditions ${_CompilerAdditions}")
    message (STATUS "WSTP HostCompilerAdditions ${_HostCompilerAdditions}")
  endif()
  if (APPLE)
    # temporarily steer find_library towards/away from frameworks
    set (_findFrameWorkSave "${CMAKE_FIND_FRAMEWORK}")
    if (Mathematica_USE_STATIC_LIBRARIES)
      set (CMAKE_FIND_FRAMEWORK "LAST")
    else()
      set (CMAKE_FIND_FRAMEWORK "FIRST")
    endif()
  endif()
  find_program (Mathematica_WSTP_WSPREP_EXECUTABLE
    NAMES "wsprep"
    HINTS ${_HostCompilerAdditions}
    PATH_SUFFIXES "bin"
    DOC "WSTP template file preprocessor executable."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  find_library (Mathematica_WSTP_LIBRARY
    NAMES ${_WSTPLibraryNames}
    HINTS ${_CompilerAdditions}
    PATH_SUFFIXES "lib"
    DOC "WSTP library to link against."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  find_path (Mathematica_WSTP_INCLUDE_DIR
    NAMES "wstp.h"
    HINTS ${_CompilerAdditions}
    PATH_SUFFIXES "include"
    DOC "Path to the WSTP include directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  # a directory result means find_library picked up wstp.framework;
  # resolve the requested version inside the framework bundle
  if (APPLE AND DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR AND IS_DIRECTORY "${Mathematica_WSTP_LIBRARY}")
    if (DEFINED Mathematica_WSTP_FIND_VERSION_MINOR)
      set (_frameworkVersionSubDir "${Mathematica_WSTP_LIBRARY}/Versions/${Mathematica_WSTP_FIND_VERSION_MAJOR}.${Mathematica_WSTP_FIND_VERSION_MINOR}")
    else()
      set (_frameworkVersionSubDir "${Mathematica_WSTP_LIBRARY}/Versions/${Mathematica_WSTP_FIND_VERSION_MAJOR}.[0-9]+")
    endif()
    file (GLOB _versionedLibrary "${_frameworkVersionSubDir}/wstp")
    if (_versionedLibrary)
      # use last if there are multiple
      list (GET _versionedLibrary -1 _versionedLibrary)
      set (Mathematica_WSTP_LIBRARY "${_versionedLibrary}" CACHE FILEPATH "WSTP library to link against."
        FORCE)
    endif()
    file (GLOB _versionedHeaderDir "${_frameworkVersionSubDir}/Headers")
    if (_versionedHeaderDir)
      set (Mathematica_WSTP_INCLUDE_DIR "${_versionedHeaderDir}" CACHE FILEPATH "Path to the WSTP include directory." FORCE)
    endif()
  endif()
  find_path (Mathematica_WSTP_HOST_INCLUDE_DIR
    NAMES "wstp.h"
    HINTS ${_HostCompilerAdditions}
    PATH_SUFFIXES "include"
    DOC "Path to the WSTP host include directory."
    NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH
  )
  if (APPLE)
    # restore global framework search mode
    set (CMAKE_FIND_FRAMEWORK "${_findFrameWorkSave}")
  endif()
  if (Mathematica_WSTP_INCLUDE_DIR)
    list (APPEND Mathematica_INCLUDE_DIRS ${Mathematica_WSTP_INCLUDE_DIR})
  endif()
endmacro(_find_WSTP)

# internal macro to find J/Link SDK inside Mathematica installation
macro (_find_jlink)
  if (NOT DEFINED Mathematica_ROOT_DIR)
    _find_mathematica()
  endif()
  _get_system_IDs(_SystemIDs)
  _get_host_system_IDs(_HostSystemIDs)
  _get_jlink_java_name(_JLinkJavaNames)
  if (Mathematica_DEBUG)
    message (STATUS "J/Link Target SystemID ${_SystemIDs}")
    message (STATUS "J/Link Host SystemID ${_HostSystemIDs}")
    message (STATUS "JLinkJavaName ${_JLinkJavaNames}")
  endif()
  find_path (Mathematica_JLink_PACKAGE_DIR
    NAMES "JLink.jar"
    HINTS
      "${Mathematica_ROOT_DIR}/SystemFiles/Links/JLink"
      "${Mathematica_ROOT_DIR}/Contents/SystemFiles/Links/JLink"
      "${Mathematica_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Links/JLink"
      "${Mathematica_ROOT_DIR}/AddOns/JLink"
    DOC "J/Link SDK root directory."
NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH ) if (EXISTS "${Mathematica_JLink_PACKAGE_DIR}") set (Mathematica_JLink_JAR_FILE "${Mathematica_JLink_PACKAGE_DIR}/JLink.jar") else() set (Mathematica_JLink_JAR_FILE "Mathematica_JLink_JAR_FILE-NOTFOUND") endif() set (_findLibraryPrefixesSave "${CMAKE_FIND_LIBRARY_PREFIXES}") set (_findLibrarySuffixesSave "${CMAKE_FIND_LIBRARY_SUFFIXES}") if (APPLE) set (CMAKE_FIND_LIBRARY_PREFIXES "lib") set (CMAKE_FIND_LIBRARY_SUFFIXES ".jnilib") elseif (WIN32) set (CMAKE_FIND_LIBRARY_PREFIXES "") set (CMAKE_FIND_LIBRARY_SUFFIXES ".dll") endif() find_library (Mathematica_JLink_RUNTIME_LIBRARY NAMES "JLinkNativeLibrary" HINTS "${Mathematica_JLink_PACKAGE_DIR}/SystemFiles/Libraries" PATHS ENV JLINK_LIB_DIR PATH_SUFFIXES ${_SystemIDs} DOC "J/Link native library." NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH ) set (CMAKE_FIND_LIBRARY_PREFIXES "${_findLibraryPrefixesSave}") set (CMAKE_FIND_LIBRARY_SUFFIXES "${_findLibrarySuffixesSave}") if (CMAKE_HOST_APPLE) if (EXISTS "${Mathematica_HOST_ROOT_DIR}/Contents/SystemFiles/Java") set (_mmaJavaHome "${Mathematica_HOST_ROOT_DIR}/Contents/SystemFiles/Java") elseif (EXISTS "${Mathematica_HOST_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Java") set (_mmaJavaHome "${Mathematica_HOST_ROOT_DIR}/Contents/Resources/Wolfram Player.app/Contents/SystemFiles/Java") else() # OS X versions of Mathematica earlier than 10 did not have a JVM bundled # but used the Java JVM pre-installed on system set (_mmaJavaHome "${Mathematica_HOST_ROOT_DIR}/SystemFiles/Java") if (DEFINED Mathematica_VERSION) if ("${Mathematica_VERSION}" VERSION_LESS "10.0") # use java_home to find path to JVM installed on system find_program(Mathematica_JAVA_HOME_EXECUTABLE "java_home" PATHS "/usr/libexec/") mark_as_advanced(Mathematica_JAVA_HOME_EXECUTABLE) if (Mathematica_JAVA_HOME_EXECUTABLE) execute_process( COMMAND "${Mathematica_JAVA_HOME_EXECUTABLE}" "--version" "1.6" TIMEOUT 10 OUTPUT_VARIABLE _mmaJavaHome ERROR_QUIET 
OUTPUT_STRIP_TRAILING_WHITESPACE) endif() endif() endif() endif() else() set (_mmaJavaHome "${Mathematica_HOST_ROOT_DIR}/SystemFiles/Java") endif() find_program (Mathematica_JLink_JAVA_EXECUTABLE NAMES "bin/${_JLinkJavaNames}" HINTS "${_mmaJavaHome}" PATH_SUFFIXES ${_HostSystemIDs} DOC "J/Link Java launcher." NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH ) find_path (Mathematica_JLink_JAVA_HOME NAMES "bin/${_JLinkJavaNames}" HINTS "${_mmaJavaHome}" PATH_SUFFIXES ${_HostSystemIDs} DOC "J/Link Java home directory." NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH ) endmacro() # internal macro to find MUnit package macro (_find_munit_package) if (Mathematica_RUN_KERNEL_ON_CONFIGURE AND COMMAND Mathematica_FIND_PACKAGE) Mathematica_FIND_PACKAGE(Mathematica_MUnit_PACKAGE_FILE "MUnit`MUnit`") # determine enclosing MUnit package directory if (Mathematica_MUnit_PACKAGE_FILE) Mathematica_GET_PACKAGE_DIR(Mathematica_MUnit_PACKAGE_DIR "${Mathematica_MUnit_PACKAGE_FILE}") endif() endif() if (NOT DEFINED Mathematica_MUnit_PACKAGE_DIR) set (Mathematica_MUnit_PACKAGE_DIR "Mathematica_MUnit_PACKAGE_DIR-NOTFOUND") endif() endmacro() # internal macro to find LibaryLink package macro (_find_librarylink_package) if (Mathematica_RUN_KERNEL_ON_CONFIGURE AND COMMAND Mathematica_FIND_PACKAGE) Mathematica_FIND_PACKAGE(Mathematica_LibraryLink_PACKAGE_FILE "LibraryLink`LibraryLink`") # determine enclosing LibraryLink package directory if (Mathematica_LibraryLink_PACKAGE_FILE) Mathematica_GET_PACKAGE_DIR(Mathematica_LibraryLink_PACKAGE_DIR "${Mathematica_LibraryLink_PACKAGE_FILE}") endif() endif() if (NOT DEFINED Mathematica_LibraryLink_PACKAGE_DIR) set (Mathematica_LibraryLink_PACKAGE_DIR "Mathematica_LibraryLink_PACKAGE_DIR-NOTFOUND") endif() endmacro() # internal helper macro to setup version related variables from existing _VERSION variable macro (_setup_package_version_variables _packageName) if (DEFINED ${_packageName}_VERSION) string (REGEX MATCHALL "[0-9]+" _versionComponents 
"${${_packageName}_VERSION}") list (LENGTH _versionComponents _len) if (_len GREATER 0) list(GET _versionComponents 0 ${_packageName}_VERSION_MAJOR) endif() if (_len GREATER 1) list(GET _versionComponents 1 ${_packageName}_VERSION_MINOR) endif() if (_len GREATER 2) list(GET _versionComponents 2 ${_packageName}_VERSION_PATCH) endif() if (_len GREATER 3) list(GET _versionComponents 3 ${_packageName}_VERSION_TWEAK) endif() set (${_packageName}_VERSION_COUNT ${_len}) if (NOT DEFINED ${_packageName}_VERSION_STRING) set (${_packageName}_VERSION_STRING ${${_packageName}_VERSION}) endif() endif() endmacro() # internal macro to setup Mathematica version related variables macro (_setup_mathematica_version_variables) if (NOT Mathematica_VERSION) set (_versionLine "") if (DEFINED Mathematica_ROOT_DIR) if (Mathematica_ROOT_DIR AND EXISTS "${Mathematica_ROOT_DIR}/.VersionID") # parse version number from hidden VersionID and PatchLevel files file (STRINGS "${Mathematica_ROOT_DIR}/.VersionID" _versionLine) if (EXISTS "${Mathematica_ROOT_DIR}/.PatchLevel") file (STRINGS "${Mathematica_ROOT_DIR}/.PatchLevel" _patchLevel) if (_versionLine MATCHES ".+" AND _patchLevel MATCHES ".+") set (_versionLine "${_versionLine}.${_patchLevel}") endif() endif() elseif (CMAKE_HOST_APPLE AND Mathematica_ROOT_DIR AND EXISTS "${Mathematica_ROOT_DIR}/Contents/Info.plist") execute_process( COMMAND "grep" "--after-context=1" "CFBundleShortVersionString" "${Mathematica_ROOT_DIR}/Contents/Info.plist" TIMEOUT 10 OUTPUT_VARIABLE _versionStr ERROR_QUIET) if (_versionStr MATCHES "([0-9]+\\.[0-9]+\\.[0-9]+)") set (_versionLine "${CMAKE_MATCH_1}") else() set (_versionLine "") endif() endif() endif() if (NOT _versionLine AND DEFINED Mathematica_MathLink_INCLUDE_DIR) if (Mathematica_MathLink_INCLUDE_DIR AND EXISTS "${Mathematica_MathLink_INCLUDE_DIR}/mathlink.h") # parse version number from mathlink.h file (STRINGS "${Mathematica_MathLink_INCLUDE_DIR}/mathlink.h" _versionLine REGEX ".*define.*MLMATHVERSION.*") 
endif() endif() if (NOT _versionLine AND DEFINED Mathematica_MathLink_HOST_INCLUDE_DIR) if (Mathematica_MathLink_HOST_INCLUDE_DIR AND EXISTS "${Mathematica_MathLink_HOST_INCLUDE_DIR}/mathlink.h") # parse version number from mathlink.h file (STRINGS "${Mathematica_MathLink_HOST_INCLUDE_DIR}/mathlink.h" _versionLine REGEX ".*define.*MLMATHVERSION.*") endif() endif() if (_versionLine MATCHES ".+") string (REGEX REPLACE "[^0-9]*([0-9]+(\\.[0-9]+)*).*" "\\1" _versionStr "${_versionLine}") if (DEFINED _versionStr) set (Mathematica_VERSION "${_versionStr}" CACHE INTERNAL "Mathematica version." FORCE) endif() endif() endif() _setup_package_version_variables(Mathematica) endmacro() # internal macro to setup WolframLibrary version related variables macro (_setup_wolframlibrary_version_variables) if (NOT Mathematica_WolframLibrary_VERSION AND Mathematica_WolframLibrary_INCLUDE_DIR) set (_file "${Mathematica_WolframLibrary_INCLUDE_DIR}/WolframLibrary.h") if (EXISTS "${_file}") file (STRINGS "${_file}" _versionLine REGEX ".*define.*WolframLibraryVersion.*") if (_versionLine) string (REGEX REPLACE "[^0-9]*([0-9]+(\\.[0-9]+)*).*" "\\1" _versionStr "${_versionLine}") if (DEFINED _versionStr) set (Mathematica_WolframLibrary_VERSION "${_versionStr}" CACHE INTERNAL "WolframLibrary version." 
FORCE)
			endif()
		endif()
	endif()
	_setup_package_version_variables(Mathematica_WolframLibrary)
endmacro()

# internal macro to setup MathLink version related variables
macro (_setup_mathlink_version_variables)
	if (NOT Mathematica_MathLink_VERSION AND Mathematica_MathLink_INCLUDE_DIR)
		set (_file "${Mathematica_MathLink_INCLUDE_DIR}/mathlink.h")
		if (EXISTS "${_file}")
			if (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR)
				set (_mlInterface "${Mathematica_MathLink_FIND_VERSION_MAJOR}")
			else()
				file (STRINGS "${_file}" _mlInterfaceLine REGEX ".*define.*MLINTERFACE.*")
				# NOTE(review): guard against an empty match before string(REGEX REPLACE),
				# which raises a hard CMake error when handed no input argument; the WSTP
				# variant of this macro already guards its interface line the same way.
				if (_mlInterfaceLine)
					string (REGEX REPLACE "[^0-9]*([0-9]+).*" "\\1" _mlInterface ${_mlInterfaceLine})
				endif()
			endif()
			file (STRINGS "${_file}" _mlRevisionLine REGEX ".*define.*MLREVISION.*")
			if (_mlRevisionLine)
				string (REGEX REPLACE "[^0-9]*([0-9]+).*" "\\1" _mlRevision ${_mlRevisionLine})
			endif()
			if (DEFINED _mlInterface AND DEFINED _mlRevision)
				set (_versionStr "${_mlInterface}.${_mlRevision}")
				set (Mathematica_MathLink_VERSION "${_versionStr}" CACHE INTERNAL "MathLink version." FORCE)
			endif()
		endif()
	endif()
	_setup_package_version_variables(Mathematica_MathLink)
endmacro()

# internal macro to setup WSTP version related variables
macro (_setup_WSTP_version_variables)
	if (NOT Mathematica_WSTP_VERSION AND Mathematica_WSTP_INCLUDE_DIR)
		set (_file "${Mathematica_WSTP_INCLUDE_DIR}/wstp.h")
		if (EXISTS "${_file}")
			if (DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR)
				set (_wstpInterface "${Mathematica_WSTP_FIND_VERSION_MAJOR}")
			else()
				file (STRINGS "${_file}" _wstpInterfaceLine REGEX ".*define.*(WS|ML)INTERFACE.*")
				if (_wstpInterfaceLine)
					string (REGEX REPLACE "[^0-9]*([0-9]+).*" "\\1" _wstpInterface ${_wstpInterfaceLine})
				endif()
			endif()
			file (STRINGS "${_file}" _wstpRevisionLine REGEX ".*define.*(WS|ML)REVISION.*")
			# NOTE(review): same empty-match guard as above for the revision line.
			if (_wstpRevisionLine)
				string (REGEX REPLACE "[^0-9]*([0-9]+).*" "\\1" _wstpRevision ${_wstpRevisionLine})
			endif()
			if (DEFINED _wstpInterface AND DEFINED _wstpRevision)
				set (_versionStr "${_wstpInterface}.${_wstpRevision}")
				set (Mathematica_WSTP_VERSION "${_versionStr}" CACHE INTERNAL
"WSTP version." FORCE) endif() endif() endif() _setup_package_version_variables(Mathematica_WSTP) endmacro() # internal macro to setup J/Link version related variables macro (_setup_jlink_version_variables) if (NOT Mathematica_JLink_VERSION AND Mathematica_JLink_PACKAGE_DIR) set (_file "${Mathematica_JLink_PACKAGE_DIR}/Source/Java/com/wolfram/jlink/KernelLink.java") if (EXISTS "${_file}") file (STRINGS "${_file}" _versionLine REGEX ".*String.*VERSION.*") string (REGEX REPLACE "[^0-9]*([0-9]+(\\.[0-9]+)*).*" "\\1" _versionStr "${_versionLine}") if (DEFINED _versionStr) set (Mathematica_JLink_VERSION "${_versionStr}" CACHE INTERNAL "J/Link version." FORCE) endif() endif() endif() _setup_package_version_variables(Mathematica_JLink) endmacro() # internal macro to setup MUnit version related variables macro (_setup_munit_package_version_variables) if (NOT Mathematica_MUnit_VERSION AND Mathematica_MUnit_PACKAGE_FILE) set (_file "${Mathematica_MUnit_PACKAGE_FILE}") if (EXISTS "${_file}") file (STRINGS "${_file}" _mUnitVersionNumberLine REGEX ".*`\\$VersionNumber.*") file (STRINGS "${_file}" _mUnitReleaseNumberLine REGEX ".*`\\$ReleaseNumber.*") file (STRINGS "${_file}" _mUnitVersionLine REGEX ".*`\\$Version.*") string (REGEX REPLACE "[^0-9]*([0-9]+\\.[0-9]+).*" "\\1" _mUnitVersionNumber ${_mUnitVersionNumberLine}) string (REGEX REPLACE "[^0-9]*([0-9]+).*" "\\1" _mUnitReleaseNumber ${_mUnitReleaseNumberLine}) if (DEFINED _mUnitVersionNumber AND DEFINED _mUnitReleaseNumber) set (_versionStr "${_mUnitVersionNumber}.${_mUnitReleaseNumber}") set (Mathematica_MUnit_VERSION "${_versionStr}" CACHE INTERNAL "MUnit version." 
FORCE) endif() endif() endif() _setup_package_version_variables(Mathematica_MUnit) endmacro() # internal macro to setup WolframLibrary library related variables macro (_setup_wolframlibrary_library_variables) if (Mathematica_WolframLibrary_LIBRARY) set (Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS "") set (Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS_DEBUG "") _setup_libraries_var(Mathematica_WolframLibrary_LIBRARY Mathematica_WolframLibrary_LIBRARIES) foreach (_library ${Mathematica_WolframLibrary_LIBRARIES}) get_filename_component (_libraryDir ${_library} DIRECTORY) list (APPEND Mathematica_LIBRARY_DIRS ${_libraryDir}) if (NOT Mathematica_USE_STATIC_LIBRARIES) list (APPEND Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS ${_libraryDir}) list (APPEND Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS_DEBUG ${_libraryDir}) list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS ${_libraryDir}) list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG ${_libraryDir}) endif() endforeach() if (NOT APPLE) # kernel binaries dir on Windows and Linux contains additional runtime libraries (e.g., Intel MKL) foreach (_systemID ${Mathematica_SYSTEM_IDS}) set (_kernelBinariesDir "${Mathematica_ROOT_DIR}/SystemFiles/Kernel/Binaries/${_systemID}") if (EXISTS "${_kernelBinariesDir}") list (APPEND Mathematica_LIBRARY_DIRS ${_kernelBinariesDir}) if (NOT Mathematica_USE_STATIC_LIBRARIES) list (APPEND Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS "${_kernelBinariesDir}") list (APPEND Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS_DEBUG "${_kernelBinariesDir}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS "${_kernelBinariesDir}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG "${_kernelBinariesDir}") endif() endif() endforeach() endif() _append_wolframlibrary_needed_system_libraries(Mathematica_WolframLibrary_LIBRARIES) list (REMOVE_DUPLICATES Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS) list (REMOVE_DUPLICATES Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS_DEBUG) list (APPEND 
Mathematica_LIBRARIES ${Mathematica_WolframLibrary_LIBRARIES}) endif() endmacro() # internal macro to setup MathLink library related variables macro (_setup_mathlink_library_variables) if (Mathematica_MathLink_LIBRARY) _setup_libraries_var(Mathematica_MathLink_LIBRARY Mathematica_MathLink_LIBRARIES) if (DEFINED Mathematica_MathLink_VERSION_MAJOR) set (Mathematica_MathLink_DEFINITIONS "-DMLINTERFACE=${Mathematica_MathLink_VERSION_MAJOR}") elseif (DEFINED Mathematica_MathLink_FIND_VERSION_MAJOR) set (Mathematica_MathLink_DEFINITIONS "-DMLINTERFACE=${Mathematica_MathLink_FIND_VERSION_MAJOR}") else() set (Mathematica_MathLink_DEFINITIONS "") endif() set (Mathematica_MathLink_RUNTIME_LIBRARY_DIRS "") set (Mathematica_MathLink_RUNTIME_LIBRARY_DIRS_DEBUG "") if (APPLE) set (Mathematica_MathLink_LINKER_FLAGS "") foreach (_library ${Mathematica_MathLink_LIBRARIES}) get_filename_component (_libraryDir ${_library} DIRECTORY) list (APPEND Mathematica_LIBRARY_DIRS ${_libraryDir}) endforeach() # for OS X we have to add the MathLink CompilerAdditions directory which contains the MathLink framework _get_target_flavor(_MathLinkFlavor) if (_MathLinkFlavor) set (_CompilerAdditions "${Mathematica_MathLink_ROOT_DIR}/CompilerAdditions/${_MathLinkFlavor}") else() set (_CompilerAdditions "${Mathematica_MathLink_ROOT_DIR}/CompilerAdditions") endif() if (IS_DIRECTORY "${_CompilerAdditions}") list (APPEND Mathematica_MathLink_RUNTIME_LIBRARY_DIRS "${_CompilerAdditions}") list (APPEND Mathematica_MathLink_RUNTIME_LIBRARY_DIRS_DEBUG "${_CompilerAdditions}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS "${_CompilerAdditions}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG "${_CompilerAdditions}") endif() elseif (UNIX) set (Mathematica_MathLink_LINKER_FLAGS "") foreach (_library ${Mathematica_MathLink_LIBRARIES}) get_filename_component (_libraryDir ${_library} DIRECTORY) list (APPEND Mathematica_LIBRARY_DIRS ${_libraryDir}) if (NOT Mathematica_USE_STATIC_LIBRARIES) list (APPEND 
Mathematica_MathLink_RUNTIME_LIBRARY_DIRS ${_libraryDir}) list (APPEND Mathematica_MathLink_RUNTIME_LIBRARY_DIRS_DEBUG ${_libraryDir}) list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS ${_libraryDir}) list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG ${_libraryDir}) endif() endforeach() elseif (WIN32) set (Mathematica_MathLink_LINKER_FLAGS "") foreach (_library ${Mathematica_MathLink_LIBRARIES}) get_filename_component (_libraryDir ${_library} DIRECTORY) list (APPEND Mathematica_LIBRARY_DIRS ${_libraryDir}) endforeach() # Windows MathLink SDK has runtime DLLs in a separate directory set (_runtimeDir "${Mathematica_MathLink_ROOT_DIR}/SystemAdditions") if (IS_DIRECTORY "${_runtimeDir}") list (APPEND Mathematica_MathLink_RUNTIME_LIBRARY_DIRS "${_runtimeDir}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS "${_runtimeDir}") endif() # Windows MathLink SDK also ships with debug DLLs in AlternativeComponents set (_runtimeDir "${Mathematica_MathLink_ROOT_DIR}/AlternativeComponents/DebugLibraries") if (IS_DIRECTORY "${_runtimeDir}") list (APPEND Mathematica_MathLink_RUNTIME_LIBRARY_DIRS_DEBUG "${_runtimeDir}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG "${_runtimeDir}") endif() endif() _append_mathlink_needed_system_libraries(Mathematica_MathLink_LIBRARIES) list (REMOVE_DUPLICATES Mathematica_MathLink_RUNTIME_LIBRARY_DIRS) list (REMOVE_DUPLICATES Mathematica_MathLink_RUNTIME_LIBRARY_DIRS_DEBUG) list (APPEND Mathematica_LIBRARIES ${Mathematica_MathLink_LIBRARIES}) endif() endmacro() # internal macro to setup WSTP library related variables macro (_setup_WSTP_library_variables) if (Mathematica_WSTP_LIBRARY) _setup_libraries_var(Mathematica_WSTP_LIBRARY Mathematica_WSTP_LIBRARIES) if (DEFINED Mathematica_WSTP_VERSION_MAJOR) set (Mathematica_WSTP_DEFINITIONS "-DWSINTERFACE=${Mathematica_WSTP_VERSION_MAJOR}") elseif (DEFINED Mathematica_WSTP_FIND_VERSION_MAJOR) set (Mathematica_WSTP_DEFINITIONS "-DWSINTERFACE=${Mathematica_WSTP_FIND_VERSION_MAJOR}") else() set 
(Mathematica_WSTP_DEFINITIONS "")
	endif()
	set (Mathematica_WSTP_RUNTIME_LIBRARY_DIRS "")
	set (Mathematica_WSTP_RUNTIME_LIBRARY_DIRS_DEBUG "")
	if (APPLE)
		set (Mathematica_WSTP_LINKER_FLAGS "")
		foreach (_library ${Mathematica_WSTP_LIBRARIES})
			get_filename_component (_libraryDir ${_library} DIRECTORY)
			list (APPEND Mathematica_LIBRARY_DIRS ${_libraryDir})
		endforeach()
		# for OS X we have to add the WSTP CompilerAdditions directory which contains the WSTP framework
		_get_target_flavor(_WSTPFlavor)
		if (_WSTPFlavor)
			set (_CompilerAdditions "${Mathematica_WSTP_ROOT_DIR}/CompilerAdditions/${_WSTPFlavor}")
		else()
			set (_CompilerAdditions "${Mathematica_WSTP_ROOT_DIR}/CompilerAdditions")
		endif()
		if (IS_DIRECTORY "${_CompilerAdditions}")
			# NOTE(review): the first two appends used ${_libraryDir}, a stale loop variable
			# left over from the foreach above; the MathLink twin of this macro appends the
			# CompilerAdditions directory to all four lists, which is what the WSTP framework
			# needs here as well.
			list (APPEND Mathematica_WSTP_RUNTIME_LIBRARY_DIRS "${_CompilerAdditions}")
			list (APPEND Mathematica_WSTP_RUNTIME_LIBRARY_DIRS_DEBUG "${_CompilerAdditions}")
			list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS "${_CompilerAdditions}")
			list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG "${_CompilerAdditions}")
		endif()
	elseif (UNIX)
		set (Mathematica_WSTP_LINKER_FLAGS "")
		foreach (_library ${Mathematica_WSTP_LIBRARIES})
			get_filename_component (_libraryDir ${_library} DIRECTORY)
			list (APPEND Mathematica_LIBRARY_DIRS ${_libraryDir})
			if (NOT Mathematica_USE_STATIC_LIBRARIES)
				list (APPEND Mathematica_WSTP_RUNTIME_LIBRARY_DIRS ${_libraryDir})
				# NOTE(review): fixed typo "MathematicaWSTP_..." (missing underscore), which
				# left the WSTP debug runtime library dir list empty on Linux.
				list (APPEND Mathematica_WSTP_RUNTIME_LIBRARY_DIRS_DEBUG ${_libraryDir})
				list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS ${_libraryDir})
				list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG ${_libraryDir})
			endif()
		endforeach()
	elseif (WIN32)
		set (Mathematica_WSTP_LINKER_FLAGS "")
		foreach (_library ${Mathematica_WSTP_LIBRARIES})
			get_filename_component (_libraryDir ${_library} DIRECTORY)
			list (APPEND Mathematica_LIBRARY_DIRS ${_libraryDir})
		endforeach()
		# Windows WSTP SDK has runtime DLLs in a separate directory
		set (_runtimeDir "${Mathematica_WSTP_ROOT_DIR}/SystemAdditions")
		if (IS_DIRECTORY "${_runtimeDir}")
			list (APPEND
Mathematica_WSTP_RUNTIME_LIBRARY_DIRS "${_runtimeDir}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS "${_runtimeDir}") endif() # Windows WSTP SDK also ships with debug DLLs in AlternativeComponents set (_runtimeDir "${Mathematica_WSTP_ROOT_DIR}/AlternativeComponents/DebugLibraries") if (IS_DIRECTORY "${_runtimeDir}") list (APPEND Mathematica_WSTP_RUNTIME_LIBRARY_DIRS_DEBUG "${_runtimeDir}") list (APPEND Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG "${_runtimeDir}") endif() endif() _append_WSTP_needed_system_libraries(Mathematica_WSTP_LIBRARIES) list (REMOVE_DUPLICATES Mathematica_WSTP_RUNTIME_LIBRARY_DIRS) list (REMOVE_DUPLICATES Mathematica_WSTP_RUNTIME_LIBRARY_DIRS_DEBUG) list (APPEND Mathematica_LIBRARIES ${Mathematica_WSTP_LIBRARIES}) endif() endmacro() # internal macro to log used variables macro (_log_used_variables) if (Mathematica_DEBUG) message (STATUS "Executing on ${CMAKE_HOST_SYSTEM}, ${CMAKE_HOST_SYSTEM_NAME}, ${CMAKE_HOST_SYSTEM_PROCESSOR}, ${CMAKE_HOST_SYSTEM_VERSION}") message (STATUS "Compiling for ${CMAKE_SYSTEM}, ${CMAKE_SYSTEM_NAME}, ${CMAKE_SYSTEM_PROCESSOR}, ${CMAKE_SYSTEM_VERSION}") message (STATUS "Configuration: ${CMAKE_BUILD_TYPE}, ${CMAKE_CONFIGURATION_TYPES}") message (STATUS "Configuration directory: ${CMAKE_CFG_INTDIR}") message (STATUS "Project source dir: ${PROJECT_SOURCE_DIR}") message (STATUS "Project binary dir: ${PROJECT_BINARY_DIR}") message (STATUS "Cross compiling: ${CMAKE_CROSSCOMPILING}") message (STATUS "Library prefixes: ${CMAKE_FIND_LIBRARY_PREFIXES}") message (STATUS "Library suffixes: ${CMAKE_FIND_LIBRARY_SUFFIXES}") message (STATUS "Current file: ${CMAKE_CURRENT_LIST_FILE}:${CMAKE_CURRENT_LIST_LINE}") message (STATUS "Parent file: ${CMAKE_PARENT_LIST_FILE}") message (STATUS "Find version: ${Mathematica_FIND_VERSION}") message (STATUS "Find exact: ${Mathematica_FIND_VERSION_EXACT}") message (STATUS "Find quietly: ${Mathematica_FIND_QUIETLY}") message (STATUS "Find required: ${Mathematica_FIND_REQUIRED}") message (STATUS 
"Find components: ${Mathematica_FIND_COMPONENTS}") message (STATUS "Find required MathLink: ${Mathematica_FIND_REQUIRED_MathLink}") message (STATUS "Find MathLink interface version: ${Mathematica_MathLink_FIND_VERSION_MAJOR}") message (STATUS "Find MathLink revision number: ${Mathematica_MathLink_FIND_VERSION_MINOR}") message (STATUS "Find required WSTP: ${Mathematica_FIND_REQUIRED_WSTP}") message (STATUS "Find WSTP interface version: ${Mathematica_WSTP_FIND_VERSION_MAJOR}") message (STATUS "Find WSTP revision number: ${Mathematica_WSTP_FIND_VERSION_MINOR}") message (STATUS "Find required WolframLibrary: ${Mathematica_FIND_REQUIRED_WolframLibrary}") message (STATUS "Find required J/Link: ${Mathematica_FIND_REQUIRED_JLink}") message (STATUS "Find required MUnit: ${Mathematica_FIND_REQUIRED_MUnit}") message (STATUS "Use static libraries: ${Mathematica_USE_STATIC_LIBRARIES}") message (STATUS "Use minimal libraries: ${Mathematica_USE_MINIMAL_LIBRARIES}") endif() endmacro() # internal macro to log found variables macro (_log_found_variables) if (Mathematica_DEBUG) message (STATUS "Mathematica CMake module dir ${Mathematica_CMAKE_MODULE_DIR}") if (Mathematica_FOUND) message (STATUS "Mathematica ${Mathematica_VERSION} found") message (STATUS "Mathematica creation ID ${Mathematica_CREATION_ID}") message (STATUS "Mathematica target root dir ${Mathematica_ROOT_DIR}") message (STATUS "Mathematica host root dir ${Mathematica_HOST_ROOT_DIR}") message (STATUS "Mathematica host MathLink include dir ${Mathematica_MathLink_HOST_INCLUDE_DIR}") message (STATUS "Mathematica host WSTP include dir ${Mathematica_WSTP_HOST_INCLUDE_DIR}") message (STATUS "Mathematica kernel executable ${Mathematica_KERNEL_EXECUTABLE}") message (STATUS "Mathematica frontend executable ${Mathematica_FRONTEND_EXECUTABLE}") message (STATUS "Mathematica target system ID ${Mathematica_SYSTEM_ID}") message (STATUS "Mathematica target system IDs ${Mathematica_SYSTEM_IDS}") message (STATUS "Mathematica host system 
ID ${Mathematica_HOST_SYSTEM_ID}") message (STATUS "Mathematica host system IDs ${Mathematica_HOST_SYSTEM_IDS}") message (STATUS "Mathematica base directory ${Mathematica_BASE_DIR}") message (STATUS "Mathematica user base directory ${Mathematica_USERBASE_DIR}") message (STATUS "Mathematica include dir ${Mathematica_INCLUDE_DIR}") message (STATUS "Mathematica include dirs ${Mathematica_INCLUDE_DIRS}") message (STATUS "Mathematica libraries ${Mathematica_LIBRARIES}") message (STATUS "Mathematica library dirs ${Mathematica_LIBRARY_DIRS}") message (STATUS "Mathematica runtime library dirs ${Mathematica_RUNTIME_LIBRARY_DIRS}") message (STATUS "Mathematica runtime debug library dirs ${Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG}") else() message (STATUS "Mathematica not found") endif() if (Mathematica_WolframLibrary_FOUND) message (STATUS "WolframLibrary ${Mathematica_WolframLibrary_VERSION} found") message (STATUS "WolframLibrary include dir ${Mathematica_WolframLibrary_INCLUDE_DIR}") message (STATUS "WolframLibrary library ${Mathematica_WolframLibrary_LIBRARY}") message (STATUS "WolframLibrary libraries ${Mathematica_WolframLibrary_LIBRARIES}") message (STATUS "WolframLibrary runtime library dirs ${Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS}") message (STATUS "WolframLibrary runtime debug library dirs ${Mathematica_WolframLibrary_RUNTIME_LIBRARY_DIRS_DEBUG}") message (STATUS "LibraryLink package dir ${Mathematica_LibraryLink_PACKAGE_DIR}") else() message (STATUS "WolframLibrary not found") endif() if (Mathematica_MathLink_FOUND) message (STATUS "MathLink ${Mathematica_MathLink_VERSION} found") message (STATUS "MathLink target root dir ${Mathematica_MathLink_ROOT_DIR}") message (STATUS "MathLink host root dir ${Mathematica_MathLink_HOST_ROOT_DIR}") message (STATUS "MathLink include dir ${Mathematica_MathLink_INCLUDE_DIR}") message (STATUS "MathLink library ${Mathematica_MathLink_LIBRARY}") message (STATUS "MathLink libraries ${Mathematica_MathLink_LIBRARIES}") message 
(STATUS "MathLink mprep executable ${Mathematica_MathLink_MPREP_EXECUTABLE}") message (STATUS "MathLink definitions ${Mathematica_MathLink_DEFINITIONS}") message (STATUS "MathLink linker flags ${Mathematica_MathLink_LINKER_FLAGS}") message (STATUS "MathLink runtime library dirs ${Mathematica_MathLink_RUNTIME_LIBRARY_DIRS}") message (STATUS "MathLink runtime debug library dirs ${Mathematica_MathLink_RUNTIME_LIBRARY_DIRS_DEBUG}") else() message (STATUS "MathLink not found") endif() if (Mathematica_WSTP_FOUND) message (STATUS "WSTP ${Mathematica_WSTP_VERSION} found") message (STATUS "WSTP target root dir ${Mathematica_WSTP_ROOT_DIR}") message (STATUS "WSTP host root dir ${Mathematica_WSTP_HOST_ROOT_DIR}") message (STATUS "WSTP include dir ${Mathematica_WSTP_INCLUDE_DIR}") message (STATUS "WSTP library ${Mathematica_WSTP_LIBRARY}") message (STATUS "WSTP libraries ${Mathematica_WSTP_LIBRARIES}") message (STATUS "WSTP wsprep executable ${Mathematica_WSTP_WSPREP_EXECUTABLE}") message (STATUS "WSTP definitions ${Mathematica_WSTP_DEFINITIONS}") message (STATUS "WSTP linker flags ${Mathematica_WSTP_LINKER_FLAGS}") message (STATUS "WSTP runtime library dirs ${Mathematica_WSTP_RUNTIME_LIBRARY_DIRS}") message (STATUS "WSTP runtime debug library dirs ${Mathematica_WSTP_RUNTIME_LIBRARY_DIRS_DEBUG}") else() message (STATUS "WSTP not found") endif() if (Mathematica_JLink_FOUND) message (STATUS "J/Link ${Mathematica_JLink_VERSION} found") message (STATUS "J/Link package dir ${Mathematica_JLink_PACKAGE_DIR}") message (STATUS "J/Link JAR file ${Mathematica_JLink_JAR_FILE}") message (STATUS "J/Link native library ${Mathematica_JLink_RUNTIME_LIBRARY}") message (STATUS "J/Link java launcher ${Mathematica_JLink_JAVA_EXECUTABLE}") message (STATUS "J/Link java home directory ${Mathematica_JLink_JAVA_HOME}") else() message (STATUS "J/Link not found") endif() if (Mathematica_MUnit_FOUND) message (STATUS "MUnit ${Mathematica_MUnit_VERSION} found") message (STATUS "MUnit package dir 
${Mathematica_MUnit_PACKAGE_DIR}")
		else()
			message (STATUS "MUnit not found")
		endif()
	endif()
	# warn explicitly about common mistakes users make
	if (UNIX AND NOT APPLE)
		if (DEFINED Mathematica_uuid_LIBRARY)
			if (Mathematica_uuid_LIBRARY MATCHES "-NOTFOUND$")
				message (WARNING "WSTP and MathLink require libuuid. Install libuuid with the system package manager.")
			endif()
		endif()
	endif()
	if (DEFINED Mathematica_VERSION)
		if (CMAKE_SIZEOF_VOID_P EQUAL 4)
			# NOTE(review): this branch tested the undefined variable WINDOWS, so the 32-bit
			# warning could never fire on Windows; WIN32 is the CMake platform variable
			# (matching the APPLE/UNIX checks below).
			if (WIN32)
				if (NOT "${Mathematica_VERSION}" VERSION_LESS "12.1")
					message (WARNING "Windows Mathematica ${Mathematica_VERSION} does not support 32-bit.")
				endif()
			elseif (APPLE)
				if (NOT "${Mathematica_VERSION}" VERSION_LESS "9.0")
					message (WARNING "Mac Mathematica ${Mathematica_VERSION} does not support 32-bit.")
				endif()
			elseif (UNIX)
				if (NOT "${Mathematica_VERSION}" VERSION_LESS "11.3")
					message (WARNING "Linux Mathematica ${Mathematica_VERSION} does not support 32-bit.")
				endif()
			endif()
		endif()
	endif()
	if (DEFINED Mathematica_VERSION_MAJOR AND DEFINED Mathematica_VERSION_MINOR AND DEFINED Mathematica_SYSTEM_IDS)
		if (APPLE AND "${Mathematica_VERSION_MAJOR}" EQUAL 5 AND "${Mathematica_VERSION_MINOR}" EQUAL 2)
			foreach (_systemID ${Mathematica_SYSTEM_IDS})
				if ("${_systemID}" STREQUAL "MacOSX-x86-64")
					message (WARNING "Mathematica 5.2 for Mac OS X does not support x86_64, run cmake with option -DCMAKE_OSX_ARCHITECTURES=i386.")
				endif()
			endforeach()
		endif()
	endif()
	if (CYGWIN AND CMAKE_COMPILER_IS_GNUCC AND Mathematica_WolframLibrary_FOUND)
		if ("${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "3.0.0" OR NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "4.0.0")
			message (WARNING "LibraryLink DLL generation requires the -mno-cygwin compiler flag, which is not supported by gcc ${CMAKE_C_COMPILER_VERSION}."
" Run cmake with options -DCMAKE_CXX_COMPILER=/usr/bin/g++-3.exe -DCMAKE_C_COMPILER=/usr/bin/gcc-3.exe.") endif() endif() endmacro(_log_found_variables) # internal macro returns cache variables that determine search result macro (_get_cache_variables _CacheVariables) set (${_CacheVariables} Mathematica_FIND_VERSION Mathematica_FIND_VERSION_EXACT Mathematica_USE_STATIC_LIBRARIES Mathematica_USE_MINIMAL_LIBRARIES Mathematica_USE_LIBCXX_LIBRARIES Mathematica_SYSTEM_IDS Mathematica_CREATION_ID Mathematica_ROOT_DIR Mathematica_HOST_ROOT_DIR Mathematica_MathLink_FIND_VERSION_MAJOR Mathematica_MathLink_FIND_VERSION_MINOR Mathematica_MathLink_ROOT_DIR Mathematica_MathLink_HOST_ROOT_DIR Mathematica_WSTP_FIND_VERSION_MAJOR Mathematica_WSTP_FIND_VERSION_MINOR Mathematica_WSTP_ROOT_DIR Mathematica_WSTP_HOST_ROOT_DIR Mathematica_JLink_PACKAGE_DIR Mathematica_MUnit_PACKAGE_FILE Mathematica_LibraryLink_PACKAGE_FILE Mathematica_CMAKE_MODULE_VERSION) endmacro() # internal macro returns cache variables that are dependent on the given variable macro (_get_dependent_cache_variables _var _outDependentVars) # do comparisons with an underscore prefix to prevent CMake from automatically # resolving the left and right hand arguments to STREQUAL if ("_${_var}" STREQUAL "_Mathematica_FIND_VERSION" OR "_${_var}" STREQUAL "_Mathematica_FIND_VERSION_EXACT") list (APPEND ${_outDependentVars} Mathematica_ROOT_DIR Mathematica_HOST_ROOT_DIR Mathematica_VERSION) _get_dependent_cache_variables("Mathematica_ROOT_DIR" ${_outDependentVars}) _get_dependent_cache_variables("Mathematica_HOST_ROOT_DIR" ${_outDependentVars}) elseif ("_${_var}" STREQUAL "_Mathematica_ROOT_DIR" OR "_${_var}" STREQUAL "_Mathematica_SYSTEM_IDS") list (APPEND ${_outDependentVars} Mathematica_VERSION Mathematica_INCLUDE_DIR Mathematica_WolframLibrary_VERSION Mathematica_WolframLibrary_INCLUDE_DIR Mathematica_WolframLibrary_LIBRARY Mathematica_KERNEL_HOST_SYSTEM_ID Mathematica_MathLink_ROOT_DIR Mathematica_WSTP_ROOT_DIR 
Mathematica_KERNEL_BASE_DIR Mathematica_KERNEL_USERBASE_DIR) _get_dependent_cache_variables("Mathematica_MathLink_ROOT_DIR" ${_outDependentVars}) _get_dependent_cache_variables("Mathematica_WSTP_ROOT_DIR" ${_outDependentVars}) elseif ("_${_var}" STREQUAL "_Mathematica_CREATION_ID") # all cached Mathematica version variables are dependent on the cached creation ID list (APPEND ${_outDependentVars} Mathematica_VERSION Mathematica_WolframLibrary_VERSION Mathematica_MathLink_VERSION Mathematica_WSTP_VERSION Mathematica_JLink_VERSION Mathematica_MUnit_VERSION) elseif ("_${_var}" STREQUAL "_Mathematica_HOST_ROOT_DIR" OR "_${_var}" STREQUAL "_Mathematica_HOST_SYSTEM_IDS") list (APPEND ${_outDependentVars} Mathematica_FRONTEND_EXECUTABLE Mathematica_KERNEL_EXECUTABLE Mathematica_KERNEL_HOST_SYSTEM_ID Mathematica_MathLink_HOST_ROOT_DIR Mathematica_WSTP_HOST_ROOT_DIR Mathematica_KERNEL_BASE_DIR Mathematica_KERNEL_USERBASE_DIR Mathematica_JLink_PACKAGE_DIR Mathematica_MUnit_PACKAGE_FILE Mathematica_LibraryLink_PACKAGE_FILE Mathematica_JLink_JAVA_EXECUTABLE Mathematica_JLink_JAVA_HOME) _get_dependent_cache_variables("Mathematica_MathLink_HOST_ROOT_DIR" ${_outDependentVars}) _get_dependent_cache_variables("Mathematica_WSTP_HOST_ROOT_DIR" ${_outDependentVars}) _get_dependent_cache_variables("Mathematica_JLink_PACKAGE_DIR" ${_outDependentVars}) _get_dependent_cache_variables("Mathematica_MUnit_PACKAGE_FILE" ${_outDependentVars}) elseif ("_${_var}" STREQUAL "_Mathematica_MathLink_ROOT_DIR") list (APPEND ${_outDependentVars} Mathematica_MathLink_VERSION Mathematica_MathLink_INCLUDE_DIR Mathematica_MathLink_LIBRARY) elseif ("_${_var}" STREQUAL "_Mathematica_MathLink_HOST_ROOT_DIR") list (APPEND ${_outDependentVars} Mathematica_MathLink_HOST_INCLUDE_DIR Mathematica_MathLink_MPREP_EXECUTABLE) elseif ("_${_var}" STREQUAL "_Mathematica_MathLink_FIND_VERSION_MAJOR" OR "_${_var}" STREQUAL "_Mathematica_MathLink_FIND_VERSION_MINOR") list (APPEND ${_outDependentVars} 
Mathematica_MathLink_VERSION Mathematica_MathLink_INCLUDE_DIR Mathematica_MathLink_LIBRARY Mathematica_MathLink_HOST_INCLUDE_DIR Mathematica_MathLink_MPREP_EXECUTABLE) elseif ("_${_var}" STREQUAL "_Mathematica_WSTP_ROOT_DIR") list (APPEND ${_outDependentVars} Mathematica_WSTP_VERSION Mathematica_WSTP_INCLUDE_DIR Mathematica_WSTP_LIBRARY) elseif ("_${_var}" STREQUAL "_Mathematica_WSTP_HOST_ROOT_DIR") list (APPEND ${_outDependentVars} Mathematica_WSTP_HOST_INCLUDE_DIR Mathematica_WSTP_WSPREP_EXECUTABLE) elseif ("_${_var}" STREQUAL "_Mathematica_WSTP_FIND_VERSION_MAJOR" OR "_${_var}" STREQUAL "_Mathematica_WSTP_FIND_VERSION_MINOR") list (APPEND ${_outDependentVars} Mathematica_WSTP_VERSION Mathematica_WSTP_INCLUDE_DIR Mathematica_WSTP_LIBRARY Mathematica_WSTP_HOST_INCLUDE_DIR Mathematica_WSTP_WSPREP_EXECUTABLE) elseif ("_${_var}" STREQUAL "_Mathematica_USE_STATIC_LIBRARIES") list (APPEND ${_outDependentVars} Mathematica_WolframLibrary_LIBRARY Mathematica_MathLink_LIBRARY Mathematica_MathLink_INCLUDE_DIR Mathematica_MathLink_HOST_INCLUDE_DIR Mathematica_WSTP_LIBRARY Mathematica_WSTP_INCLUDE_DIR Mathematica_WSTP_HOST_INCLUDE_DIR) elseif ("_${_var}" STREQUAL "_Mathematica_USE_MINIMAL_LIBRARIES") list (APPEND ${_outDependentVars} Mathematica_WolframLibrary_LIBRARY) elseif ("_${_var}" STREQUAL "_Mathematica_USE_LIBCXX_LIBRARIES") list (APPEND ${_outDependentVars} Mathematica_MathLink_LIBRARY Mathematica_WSTP_LIBRARY) elseif ("_${_var}" STREQUAL "_Mathematica_JLink_PACKAGE_DIR") list (APPEND ${_outDependentVars} Mathematica_JLink_VERSION Mathematica_JLink_RUNTIME_LIBRARY) elseif ("_${_var}" STREQUAL "_Mathematica_MUnit_PACKAGE_FILE") list (APPEND ${_outDependentVars} Mathematica_MUnit_VERSION) endif() endmacro(_get_dependent_cache_variables) # internal macro to cleanup outdated cache variables macro (_cleanup_cache) _get_cache_variables(_CacheVariables) set (_vars_to_clean "") foreach (_CacheVariable IN LISTS _CacheVariables) get_property(_cacheVariableType CACHE 
"${_CacheVariable}" PROPERTY TYPE) if (DEFINED ${_CacheVariable} AND DEFINED ${_CacheVariable}_LAST) if (NOT "${${_CacheVariable}}" STREQUAL "${${_CacheVariable}_LAST}") # search var has changed _get_dependent_cache_variables(${_CacheVariable} _vars_to_clean) if (Mathematica_DEBUG) message (STATUS "${_CacheVariable} changed from ${${_CacheVariable}_LAST} to ${${_CacheVariable}}") endif() elseif ("${_cacheVariableType}" MATCHES "PATH" AND NOT "${${_CacheVariable}}" MATCHES "-NOTFOUND$" AND NOT EXISTS "${${_CacheVariable}}") # original var path no longer exists list (APPEND _vars_to_clean "${_CacheVariable}") _get_dependent_cache_variables(${_CacheVariable} _vars_to_clean) if (Mathematica_DEBUG) message (STATUS "${_CacheVariable} path ${${_CacheVariable}} no longer exists") endif() elseif ("${_cacheVariableType}" MATCHES "PATH" AND EXISTS "${${_CacheVariable}}" AND "${${_CacheVariable}}" IS_NEWER_THAN "${CMAKE_CACHEFILE_DIR}/CMakeCache.txt") # search var path has changed _get_dependent_cache_variables(${_CacheVariable} _vars_to_clean) if (Mathematica_DEBUG) message (STATUS "${_CacheVariable} path ${${_CacheVariable}} modified since last CMake run") endif() endif() elseif (DEFINED ${_CacheVariable} OR DEFINED ${_CacheVariable}_LAST) # search var presence changed _get_dependent_cache_variables(${_CacheVariable} _vars_to_clean) if (Mathematica_DEBUG) message (STATUS "${_CacheVariable} presence changed") endif() endif() endforeach() if (_vars_to_clean) list (REMOVE_DUPLICATES _vars_to_clean) message (STATUS "Mathematica environment changed, restart search ...") if (Mathematica_DEBUG) message (STATUS "Unset ${_vars_to_clean}") endif() foreach (_CacheVariable IN LISTS _vars_to_clean) unset(${_CacheVariable} CACHE) unset(${_CacheVariable}) endforeach() endif() endmacro() # internal macro to update cache variables macro (_update_cache) mark_as_advanced( Mathematica_INCLUDE_DIR Mathematica_KERNEL_EXECUTABLE Mathematica_FRONTEND_EXECUTABLE 
Mathematica_WolframLibrary_INCLUDE_DIR Mathematica_WolframLibrary_LIBRARY Mathematica_MathLink_INCLUDE_DIR Mathematica_MathLink_LIBRARY Mathematica_MathLink_HOST_INCLUDE_DIR Mathematica_MathLink_MPREP_EXECUTABLE Mathematica_WSTP_INCLUDE_DIR Mathematica_WSTP_LIBRARY Mathematica_WSTP_HOST_INCLUDE_DIR Mathematica_WSTP_WSPREP_EXECUTABLE Mathematica_KERNEL_HOST_SYSTEM_ID Mathematica_KERNEL_BASE_DIR Mathematica_KERNEL_USERBASE_DIR Mathematica_MUnit_PACKAGE_FILE Mathematica_LibraryLink_PACKAGE_FILE Mathematica_JLink_RUNTIME_LIBRARY Mathematica_JLink_JAVA_EXECUTABLE Mathematica_JLink_JAVA_HOME ) _get_cache_variables(_CacheVariables) foreach (_CacheVariable IN LISTS _CacheVariables) if (DEFINED ${_CacheVariable}) set (${_CacheVariable}_LAST ${${_CacheVariable}} CACHE INTERNAL "Last value of ${_CacheVariable}." FORCE) else() unset(${_CacheVariable}_LAST CACHE) endif() endforeach() endmacro() # internal macro to return variables that need to exist in order for component # to be considered found successfully macro (_get_required_vars _component _outVars) if ("${_component}" STREQUAL "Mathematica") set (${_outVars} Mathematica_ROOT_DIR Mathematica_KERNEL_EXECUTABLE) elseif ("${_component}" STREQUAL "MathLink") set (${_outVars} Mathematica_MathLink_LIBRARY Mathematica_MathLink_INCLUDE_DIR) elseif ("${_component}" STREQUAL "WSTP") set (${_outVars} Mathematica_WSTP_LIBRARY Mathematica_WSTP_INCLUDE_DIR) elseif ("${_component}" STREQUAL "WolframLibrary") set (${_outVars} Mathematica_WolframLibrary_LIBRARY Mathematica_WolframLibrary_INCLUDE_DIR) elseif ("${_component}" STREQUAL "JLink") set (${_outVars} Mathematica_JLink_PACKAGE_DIR Mathematica_JLink_JAR_FILE) elseif ("${_component}" STREQUAL "MUnit") set (${_outVars} Mathematica_MUnit_PACKAGE_DIR) endif() endmacro() macro (_get_components_to_find _outComponents) if (Mathematica_FIND_COMPONENTS) list (APPEND ${_outComponents} ${Mathematica_FIND_COMPONENTS}) else() if (DEFINED Mathematica_FIND_VERSION_MAJOR) set (_versionMajor 
# prefer the explicitly requested major version over the detected one
"${Mathematica_FIND_VERSION_MAJOR}") elseif (DEFINED Mathematica_VERSION_MAJOR) set (_versionMajor "${Mathematica_VERSION_MAJOR}") else() set (_versionMajor "") endif() if (_versionMajor) if (_versionMajor GREATER 9) list (APPEND ${_outComponents} "WSTP" "MathLink" "WolframLibrary" "JLink" "MUnit") elseif (_versionMajor GREATER 7) list (APPEND ${_outComponents} "MathLink" "WolframLibrary" "JLink" "MUnit") else() list (APPEND ${_outComponents} "MathLink" "JLink" "MUnit") endif() else() list (APPEND ${_outComponents} "WSTP" "MathLink" "WolframLibrary" "JLink" "MUnit") endif() endif() list (REMOVE_DUPLICATES ${_outComponents}) endmacro() # internal macro to handle the QUIETLY and REQUIRED arguments and set *_FOUND variables macro (_setup_found_variables) # determine required Mathematica components _get_required_vars("Mathematica" _requiredVars) _get_components_to_find(_components) foreach(_component IN LISTS _components) _get_required_vars(${_component} _requiredComponentVars) # suppress find_package_handle_standard_args warning on mismatching names set (FPHSA_NAME_MISMATCHED On) find_package_handle_standard_args( Mathematica_${_component} REQUIRED_VARS ${_requiredComponentVars} VERSION_VAR Mathematica_${_component}_VERSION) string(TOUPPER ${_component} _UpperCaseComponent) # find_package_handle_standard_args only sets upper case _FOUND variable set (Mathematica_${_component}_FOUND ${MATHEMATICA_${_UpperCaseComponent}_FOUND}) if (Mathematica_FIND_REQUIRED_${_component}) list (APPEND _requiredVars ${_requiredComponentVars} ) endif() endforeach() find_package_handle_standard_args( Mathematica REQUIRED_VARS ${_requiredVars} VERSION_VAR Mathematica_VERSION) # find_package_handle_standard_args only sets upper case _FOUND variable set (Mathematica_FOUND ${MATHEMATICA_FOUND}) endmacro() # internal macro that searches for requested components macro (_find_components) _get_components_to_find(_components) foreach(_component IN LISTS _components) if ("${_component}" STREQUAL
# dispatch to the finder macro that matches the requested component
"MathLink") _find_mathlink() _setup_mathlink_version_variables() _setup_mathlink_library_variables() elseif ("${_component}" STREQUAL "WSTP") _find_wstp() _setup_wstp_version_variables() _setup_wstp_library_variables() elseif ("${_component}" STREQUAL "WolframLibrary") _find_wolframlibrary() _setup_wolframlibrary_version_variables() _setup_wolframlibrary_library_variables() _find_librarylink_package() elseif ("${_component}" STREQUAL "JLink") _find_jlink() _setup_jlink_version_variables() elseif ("${_component}" STREQUAL "MUnit") _find_munit_package() _setup_munit_package_version_variables() else() message (FATAL_ERROR "Unknown Mathematica component ${_component}") endif() endforeach() list (REMOVE_DUPLICATES Mathematica_INCLUDE_DIRS) list (REMOVE_DUPLICATES Mathematica_LIBRARIES) list (REMOVE_DUPLICATES Mathematica_LIBRARY_DIRS) list (REMOVE_DUPLICATES Mathematica_RUNTIME_LIBRARY_DIRS) list (REMOVE_DUPLICATES Mathematica_RUNTIME_LIBRARY_DIRS_DEBUG) endmacro() # internal helper function to compute the install name of a shared library under Mac OS X macro (_get_install_name _libraryPath _libraryInstallName _libraryAbsPath) if (APPLE) set (${_libraryInstallName} "") set (${_libraryAbsPath} "") if (IS_DIRECTORY "${_libraryPath}") # framework folder get_filename_component(_name "${_libraryPath}" NAME_WE) set (_path "${_libraryPath}/${_name}") else() set (_path "${_libraryPath}") endif() if (EXISTS "${_path}") find_program(Mathematica_OTOOL_EXECUTABLE "otool") mark_as_advanced(Mathematica_OTOOL_EXECUTABLE) get_filename_component(${_libraryAbsPath} ${_path} ABSOLUTE) set (_otoolOutput "") if (Mathematica_OTOOL_EXECUTABLE) execute_process( COMMAND "${Mathematica_OTOOL_EXECUTABLE}" "-D" "-X" "${${_libraryAbsPath}}" TIMEOUT 5 OUTPUT_VARIABLE _otoolOutput OUTPUT_STRIP_TRAILING_WHITESPACE) # install name is in last line of otool output string (REPLACE "\n" ";" _otoolOutput "${_otoolOutput}") endif() if (_otoolOutput) list (GET _otoolOutput -1 ${_libraryInstallName}) else()
# otool unavailable or produced no output: leave the install name empty
set (${_libraryInstallName} "") endif() endif() endif() endmacro() # FindMathematica "main" starts here _setup_findmathematica_options() _log_used_variables() _setup_mathematica_systemIDs() _setup_mathematica_creationID() if (DEFINED Mathematica_SYSTEM_IDS_LAST) # not the initial find invocation _cleanup_cache() endif() _setup_mathematica_base_directory() _setup_mathematica_userbase_directory() _find_mathematica() _setup_mathematica_version_variables() # now setup public functions based on found components # public function to convert a CMake string to a Mathematica string function (Mathematica_TO_NATIVE_STRING _inStr _outStr) string (REPLACE "\\" "\\\\" _str ${_inStr}) string (REPLACE "\"" "\\\"" _str ${_str}) set (${_outStr} "\"${_str}\"" PARENT_SCOPE) endfunction() # public function to convert a CMake list to a Mathematica list function (Mathematica_TO_NATIVE_LIST _outList) set (_list "{") foreach (_elem ${ARGN}) Mathematica_TO_NATIVE_STRING(${_elem} _elemStr) if ("${_list}" STREQUAL "{") set (_list "{${_elemStr}") else() set (_list "${_list},${_elemStr}") endif() endforeach() set (${_outList} "${_list}}" PARENT_SCOPE) endfunction() # public function to convert CMake paths to Mathematica paths function (Mathematica_TO_NATIVE_PATH _inPathStr _outPathStr) list (LENGTH _inPathStr _len) if (_len EQUAL 0) set (${_outPathStr} "" PARENT_SCOPE) elseif (_len EQUAL 1) _to_native_path("${_inPathStr}" _nativePath) Mathematica_TO_NATIVE_STRING("${_nativePath}" _pathMma) set (${_outPathStr} "${_pathMma}" PARENT_SCOPE) else() set (_lastDir "") set (_names "") set (_nativePathsMma "") set (_hasMapPaths FALSE) set (_requiresList FALSE) foreach (_path IN LISTS _inPathStr ITEMS "") get_filename_component(_dir "${_path}" DIRECTORY) get_filename_component(_name "${_path}" NAME) if (_lastDir AND NOT "${_dir}" STREQUAL "${_lastDir}") list (LENGTH _names _nameCount) if (_nameCount GREATER 1) Mathematica_TO_NATIVE_PATH("${_lastDir}" _commonDirMma) Mathematica_TO_NATIVE_LIST(_namesMma
# build a Mathematica list of the file names that share the common directory
${_names}) set (_code "Map[ToFileName[${_commonDirMma},#]&,${_namesMma}]") set (_hasMapPaths TRUE) else() Mathematica_TO_NATIVE_PATH("${_lastDir}/${_names}" _code) endif() if (_nativePathsMma) set (_nativePathsMma "${_nativePathsMma},${_code}") set (_requiresList TRUE) else() set (_nativePathsMma "${_code}") endif() set (_names "") endif() set (_lastDir "${_dir}") list (APPEND _names "${_name}") endforeach() if (_requiresList AND _hasMapPaths) set (_nativePathsMma "Flatten[{${_nativePathsMma}}]") elseif (_requiresList) set (_nativePathsMma "{${_nativePathsMma}}") endif() set (${_outPathStr} "${_nativePathsMma}" PARENT_SCOPE) endif() endfunction() # public function to initialize Mathematica test properties function (Mathematica_SET_TESTS_PROPERTIES) _select_configuration_run_time_dirs(_configRuntimeDirs) _get_host_library_search_path_envvars(_envVars) foreach (_envVar IN LISTS _envVars) if (DEFINED ENV{${_envVar}}) file (TO_CMAKE_PATH "$ENV{${_envVar}}" _envRuntimeDirs) # prepend Mathematica runtime directories to system ones set (_runtimeDirs ${_configRuntimeDirs} ${_envRuntimeDirs}) else() set (_runtimeDirs ${_configRuntimeDirs}) endif() if (_runtimeDirs) list (REMOVE_DUPLICATES _runtimeDirs) if (CYGWIN) # CYGWIN path list requires UNIX syntax _to_cmake_path_list(_nativeRuntimeDirs ${_runtimeDirs}) else() _to_native_path_list(_nativeRuntimeDirs ${_runtimeDirs}) endif() foreach (_testName ${ARGV}) if ("${_testName}" STREQUAL "PROPERTIES") break() endif() set_property (TEST ${_testName} APPEND PROPERTY ENVIRONMENT "${_envVar}=${_nativeRuntimeDirs}" ) endforeach() endif() endforeach() set (_haveProperties False) foreach (_testName IN ITEMS ${ARGV}) if ("${_testName}" STREQUAL "PROPERTIES") set (_haveProperties True) break() endif() set_property (TEST ${_testName} APPEND PROPERTY LABELS "Mathematica") endforeach() if (_haveProperties) set_tests_properties (${ARGV}) endif() endfunction(Mathematica_SET_TESTS_PROPERTIES) # internal macro to return test driver for host
# each test command is wrapped in a platform-specific driver script (FindMathematicaTestDriver.sh/.cmd)
platform function (_add_test_driver _cmdVar _testName _inputVar _inputFileVar) if (CMAKE_HOST_UNIX) set (_testDriver "${Mathematica_CMAKE_MODULE_DIR}/FindMathematicaTestDriver.sh") elseif (CMAKE_HOST_WIN32) set (_testDriver "${Mathematica_CMAKE_MODULE_DIR}/FindMathematicaTestDriver.cmd") endif() if (NOT EXISTS "${_testDriver}") message (FATAL_ERROR "FindMathematica test driver script ${_testDriver} is missing.") endif() _make_file_executable(${_testDriver}) if (CYGWIN) _to_cmake_path("${_testDriver}" _testDriver) else() _to_native_path("${_testDriver}" _testDriver) endif() list (APPEND ${_cmdVar} "${_testDriver}" "${_testName}" "$") if (DEFINED ${_inputVar}) list (APPEND ${_cmdVar} "input" "${${_inputVar}}") elseif (DEFINED ${_inputFileVar}) list (APPEND ${_cmdVar} "inputfile" "${${_inputFileVar}}") else() list (APPEND ${_cmdVar} "noinput") endif() set (${_cmdVar} ${${_cmdVar}} PARENT_SCOPE) endfunction() # internal macro to add platform specific executable launch prefix macro (_add_launch_prefix _cmdVar _systemIDVar) if (DEFINED ${_systemIDVar}) if (CMAKE_HOST_APPLE) if (NOT "${${_systemIDVar}}" STREQUAL "${Mathematica_HOST_SYSTEM_ID}") # under Mac OS X, run appropriate target architecture of executable universal binary # by using the the /usr/bin/arch tool which is available since Leopard # (Mac OS X 10.5.0 is Darwin 9.0.0) if ("${CMAKE_HOST_SYSTEM_VERSION}" VERSION_LESS "9.0.0") message (STATUS "Executable system ID selection of ${${_systemIDVar}} is not supported, running default.") elseif ("${${_systemIDVar}}" STREQUAL "MacOSX-x86") list (APPEND ${_cmdVar} "/usr/bin/arch" "-i386") elseif("${${_systemIDVar}}" STREQUAL "MacOSX-x86-64") list (APPEND ${_cmdVar} "/usr/bin/arch" "-x86_64") elseif("${${_systemIDVar}}" STREQUAL "MacOSX-ARM64") list (APPEND ${_cmdVar} "/usr/bin/arch" "-arm64") elseif("${${_systemIDVar}}" MATCHES "Darwin|MacOSX") list (APPEND ${_cmdVar} "/usr/bin/arch" "-ppc") elseif("${${_systemIDVar}}" STREQUAL "Darwin-PowerPC64") list (APPEND
# 64-bit PowerPC binaries need an explicit arch flag
${_cmdVar} "/usr/bin/arch" "-ppc64") else() message (STATUS "Executable system ID ${${_systemIDVar}} is not supported, running default.") endif() endif() endif() endif() endmacro() # internal macro to set up kernel launch command macro (_add_kernel_launch_code _cmdVar _systemIDVar _kernelOptionsVar) if (CMAKE_HOST_WIN32 OR CYGWIN) set (_kernelExecutable "${Mathematica_KERNEL_EXECUTABLE}") if (DEFINED ${_systemIDVar}) # under Windows, run alternate binary for given system ID get_filename_component(_kernelName "${_kernelExecutable}" NAME) set (_kernelExecutable "${Mathematica_HOST_ROOT_DIR}/SystemFiles/Kernel/Binaries/${${_systemIDVar}}/${_kernelName}") if (NOT EXISTS "${_kernelExecutable}") set (_kernelExecutable "${Mathematica_KERNEL_EXECUTABLE}") if (NOT "${_systemIDVar}" STREQUAL "${Mathematica_HOST_SYSTEM_ID}") message (STATUS "Kernel executable for ${${_systemIDVar}} is not available, running default ${Mathematica_HOST_SYSTEM_ID}.") endif() endif() endif() _to_native_path("${_kernelExecutable}" _kernelExecutable) list (APPEND ${_cmdVar} "${_kernelExecutable}") elseif (CMAKE_HOST_APPLE) _add_launch_prefix(${_cmdVar} ${_systemIDVar}) _to_native_path("${Mathematica_KERNEL_EXECUTABLE}" _kernelExecutable) list (APPEND ${_cmdVar} "${_kernelExecutable}") elseif (CMAKE_HOST_UNIX) _to_native_path("${Mathematica_KERNEL_EXECUTABLE}" _kernelExecutable) list (APPEND ${_cmdVar} "${_kernelExecutable}") if (DEFINED ${_systemIDVar}) if (Mathematica_VERSION) if (NOT "${Mathematica_VERSION}" VERSION_LESS "8.0") # Mathematica 8 kernel wrapper shell script supports option -SystemID list (APPEND ${_cmdVar} "-SystemID" "${${_systemIDVar}}") elseif (NOT "${_systemIDVar}" STREQUAL "${Mathematica_HOST_SYSTEM_ID}") message (STATUS "Kernel system ID selection of ${${_systemIDVar}} is not supported, running default ${Mathematica_HOST_SYSTEM_ID}.") endif() endif() endif() else() message (FATAL_ERROR "Unsupported host platform ${CMAKE_HOST_SYSTEM_NAME}") endif() if (DEFINED
# use the caller-supplied kernel options when given, otherwise batch-mode defaults below
${_kernelOptionsVar}) list (APPEND ${_cmdVar} ${${_kernelOptionsVar}}) else() list (APPEND ${_cmdVar} "-noinit" "-noprompt") endif() endmacro(_add_kernel_launch_code) macro (_test_use_tempfile_for_code_segments _codeVar _useTempFileVar) set (_codeLength 0) set (_codeSegmentCount 1) set (_usesReservedChars FALSE) foreach (_codeSegment IN LISTS ${_codeVar}) string (LENGTH "${_codeSegment}" _codeSegmentLength) math (EXPR _codeLength "${_codeLength} + ${_codeSegmentLength}") if (_codeSegment MATCHES "(Get|Needs|Install|Sequence)\\[[^]]*\\]") # start new code segment math (EXPR _codeSegmentCount "${_codeSegmentCount} + 1") endif() if (NOT _usesReservedChars) if (_codeSegment MATCHES "[<>|&!%^]") set (_usesReservedChars TRUE) endif() endif() endforeach() if (CMAKE_HOST_WIN32 AND (_usesReservedChars OR _codeLength GREATER 1000 OR _codeSegmentCount GREATER 3)) # under Windows XP or later cmd.exe has a command line length limit of 8191 characters. # we do not use inline statements if the approximate command line length # might exceed that limit or there are too many individual arguments.
# (heuristics continue: UNIX uses a larger threshold purely for command line readability)
# we write the inline statements to a temporary script instead set (${_useTempFileVar} TRUE) elseif (CMAKE_HOST_UNIX AND (_codeLength GREATER 10000 OR _codeSegmentCount GREATER 10)) # for UNIX use a temp file if command line becomes confusing set (${_useTempFileVar} TRUE) else() set (${_useTempFileVar} FALSE) endif() endmacro() macro (_code_segments_to_compound_expressions _codeVar _codeSegments) # collect all CODE sections into CompoundExpressions set (${_codeSegments} "") set (_currentCodeSegment "") set (_currentCodeSegmentCompound False) foreach (_codeSegment IN LISTS ${_codeVar} ITEMS "Sequence[]") if (_codeSegment MATCHES "\n") # remove indentation with tabs string (REGEX REPLACE "\t+" "" _codeSegment "${_codeSegment}") # separate multiple lines via commas string (REPLACE "\n" "," _codeSegment "${_codeSegment}") endif() # prevent CMake from interpreting ; as a list separator string (REPLACE ";" "\\;" _codeSegment "${_codeSegment}") if (_currentCodeSegment) if (NOT _codeSegment STREQUAL "Sequence[]") set (_currentCodeSegmentCompound True) set (_currentCodeSegment "${_currentCodeSegment},${_codeSegment}") endif() else() set (_currentCodeSegment "${_codeSegment}") endif() # flush current CompoundExpression when a Get[...], Needs[...] or Install[...]
# expression is encountered, so that new context definitions become effective # immediately for subsequent commands # Sequence[] can be used to explicitly flush the current CompoundExpression if (_codeSegment MATCHES "(Get|Needs|Install|Sequence)\\[[^]]*\\]") if (_currentCodeSegmentCompound OR (CMAKE_HOST_WIN32 AND NOT _currentCodeSegment MATCHES " ")) # note that the blanks around the CompoundExpression argument below are necessary # to force CMake to do proper cmd.exe quoting of the resulting parameter under Windows # (a comma in the parameter may be misinterpreted as a separator otherwise) list (APPEND ${_codeSegments} "-run" "CompoundExpression[ ${_currentCodeSegment} ]") elseif (NOT _currentCodeSegment STREQUAL "Sequence[]") # flush single code segment, but only if it is not a NOP list (APPEND ${_codeSegments} "-run" "${_currentCodeSegment}") endif() set (_currentCodeSegment "") set (_currentCodeSegmentCompound False) endif() endforeach() endmacro(_code_segments_to_compound_expressions) macro (_code_segments_to_tempfile _codeVar _tempScriptFile) # check for use of CMake generator expressions in inline code set (_contentsHasGeneratorExpressions FALSE) set (_contents "") foreach (_codeSegment IN LISTS ${_codeVar}) string (REPLACE ";" "\\;" _line "${_codeSegment}") list (APPEND _contents "${_line}") if (NOT _contentsHasGeneratorExpressions) if ("${_line}" MATCHES "\\$<.*>") set (_contentsHasGeneratorExpressions TRUE) endif() endif() endforeach() string (REPLACE ";" "\n" _contents "${_contents}") # use script content MD5 as temporary file name string (MD5 _scriptName "${_contents}") set (_tempScript "${CMAKE_CURRENT_BINARY_DIR}/FindMathematica/${_scriptName}.m") file (WRITE "${_tempScript}" "${_contents}") if (_contentsHasGeneratorExpressions) set (_configNameOrNoneGeneratorExpression "$<$:None>$<$>:$>") set (_tempConfigScript "${CMAKE_CURRENT_BINARY_DIR}/FindMathematica/${_scriptName}_${_configNameOrNoneGeneratorExpression}.m") file (GENERATE OUTPUT
# file(GENERATE) expands the generator expressions into a per-configuration script file
# NOTE(review): the "$<$:None>$<$>:$>" expression above looks truncated (empty "$<...>"
# conditions); verify against the upstream FindMathematica module
"${_tempConfigScript}" INPUT "${_tempScript}") else() set (_tempConfigScript "${_tempScript}") endif() set (${_tempScriptFile} "${_tempConfigScript}") endmacro(_code_segments_to_tempfile) # internal macro to translate CODE or SCRIPT option to Mathematica launch command macro (_add_script_or_code _cmdVar _scriptVar _codeVar) if (DEFINED ${_codeVar} OR DEFINED ${_scriptVar}) # start with code to prepend the FindMathematica module directory to the Mathematica $Path Mathematica_TO_NATIVE_PATH("${Mathematica_CMAKE_MODULE_DIR}" _cmakeModuleDirMma) set (_code "PrependTo[$Path, ${_cmakeModuleDirMma}]") # add given inline code statements if (DEFINED ${_codeVar}) list (APPEND _code ${${_codeVar}}) endif() # compute absolute path to given script if (DEFINED ${_scriptVar}) if (IS_ABSOLUTE "${${_scriptVar}}") _to_cmake_path("${${_scriptVar}}" _scriptFileAbs) else() _to_cmake_path("${CMAKE_CURRENT_SOURCE_DIR}/${${_scriptVar}}" _scriptFileAbs) endif() endif() if (NOT DEFINED ${_scriptVar}) # no given script, quit kernel explicitly unless last code statement already does it list (GET _code -1 _lastStatement) if (NOT _lastStatement MATCHES "^(Quit|Exit)\\[") list (APPEND _code "Quit[]") endif() elseif ("${Mathematica_VERSION}" VERSION_LESS "10.0") # Although the -script option is supported since Mathematica 8, under Mathematica 9 # using the -script option does not work as expected, if it is preceded by multiple inline # Mathematica commands using the -run option. # Thus we use the Get function instead, which should work with all versions.
# (pre-10.0 fallback: load the script via Get and terminate explicitly with Quit)
# According to https://reference.wolfram.com/language/tutorial/WolframLanguageScripts.html # running the kernel with the -script option is equivalent to reading the file using the Get function # with a single difference: after the last command in the file is evaluated, the kernel terminates Mathematica_TO_NATIVE_PATH("${_scriptFileAbs}" _scriptFileMma) list (APPEND _code "Get[${_scriptFileMma}]" "Quit[]") endif() # convert resulting code to kernel inline code segments or if necessary to a temporary script file _test_use_tempfile_for_code_segments(_code _useTempFile) if (_useTempFile) _code_segments_to_tempfile(_code _tempScriptFile) Mathematica_TO_NATIVE_PATH("${_tempScriptFile}" _tempScriptFileMma) list (APPEND ${_cmdVar} "-run" "Get[${_tempScriptFileMma}]") else() _code_segments_to_compound_expressions(_code _codeSegments) list (APPEND ${_cmdVar} ${_codeSegments}) endif() # finally, run given script with -script option if using Mathematica 10 or later if (DEFINED ${_scriptVar}) if (NOT "${Mathematica_VERSION}" VERSION_LESS "10.0") list (APPEND ${_cmdVar} "-script" "${_scriptFileAbs}") # after the last command in the script file is evaluated, the kernel terminates automatically endif() endif() endif() endmacro(_add_script_or_code) # internal macro to set up linkmode launch command macro (_add_linkmode_launch_code _cmdVar _protocolKind _systemIDVar _kernelOptionsVar _linkProtocolVar _scriptVar _codeVar) list (APPEND ${_cmdVar} "-linkmode" "launch") if (DEFINED ${_linkProtocolVar}) list (APPEND ${_cmdVar} "-linkprotocol" "${${_linkProtocolVar}}") endif() list (APPEND ${_cmdVar} "-linkname") if (UNIX AND NOT CYGWIN) # UNIX (except for Cygwin) requires quoted link name path and -mathlink or -wstp set (_kernelLaunchArgs "") _add_kernel_launch_code(_kernelLaunchArgs ${_systemIDVar} ${_kernelOptionsVar}) _add_script_or_code(_kernelLaunchArgs ${_scriptVar} ${_codeVar}) _list_to_cmd_str(_kernelLaunchStr ${_kernelLaunchArgs}) list (APPEND ${_cmdVar} "${_kernelLaunchStr}
${_protocolKind}") else () _add_kernel_launch_code(${_cmdVar} ${_systemIDVar} ${_kernelOptionsVar}) endif() endmacro() if (Mathematica_KERNEL_EXECUTABLE) # public function for executing Mathematica code file at configuration time function (Mathematica_EXECUTE) set(_options "") list(APPEND _options CACHE) set(_oneValueArgs SCRIPT SYSTEM_ID INPUT_FILE OUTPUT_FILE ERROR_FILE RESULT_VARIABLE OUTPUT_VARIABLE ERROR_VARIABLE TIMEOUT DOC) set(_multiValueArgs CODE KERNEL_OPTIONS) cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN}) if(_option_UNPARSED_ARGUMENTS) message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}") elseif (NOT _option_CODE AND NOT _option_SCRIPT) message (FATAL_ERROR "Either the keyword CODE or SCRIPT must be present.") endif() if (_option_CACHE AND _option_OUTPUT_VARIABLE) if (DEFINED "${_option_OUTPUT_VARIABLE}") set (_var "${${_option_OUTPUT_VARIABLE}}") if (_var AND NOT "${_var}" MATCHES "\\$Failed|\\$Aborted|Mathematica cannot find a valid password") # use result from cache if is not a false constant, $Failed, $Aborted or not properly registered return() endif() endif() endif() set (_cmd COMMAND) _add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS) _add_script_or_code(_cmd _option_SCRIPT _option_CODE) if (_option_CODE) list (APPEND _cmd OUTPUT_STRIP_TRAILING_WHITESPACE) list (APPEND _cmd ERROR_STRIP_TRAILING_WHITESPACE) endif() foreach (_key IN LISTS _oneValueArgs) set (_value "_option_${_key}") if (DEFINED ${_value}) if (_key MATCHES "_VARIABLE$") list (APPEND _cmd ${_key} "${${_value}}") list (APPEND _variables "${${_value}}") elseif (NOT _key MATCHES "SCRIPT|CODE|SYSTEM_ID|DOC") list (APPEND _cmd ${_key} "${${_value}}") endif() endif() endforeach() list (APPEND _cmd WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}") if (Mathematica_DEBUG) message (STATUS "execute_process: ${_cmd}") endif() execute_process (${_cmd}) # put result to cache if (_option_OUTPUT_VARIABLE) # if 
Mathematica is not registered properly, print a warning if ("${${_option_OUTPUT_VARIABLE}}" MATCHES "Mathematica cannot find a valid password") message (WARNING "${${_option_OUTPUT_VARIABLE}}") return() endif() endif() if (_option_CACHE AND _option_OUTPUT_VARIABLE) if (NOT _option_DOC) set (_option_DOC "Mathematica_EXECUTE kernel output.") endif() set (${_option_OUTPUT_VARIABLE} "${${_option_OUTPUT_VARIABLE}}" CACHE STRING "${_option_DOC}" FORCE) endif() # propagate variables to parent scope foreach (_var IN LISTS _variables) if (DEFINED ${_var}) set (${_var} ${${_var}} PARENT_SCOPE) endif() endforeach() endfunction(Mathematica_EXECUTE) # public function for executing Mathematica code at build time as a standalone target function (Mathematica_ADD_CUSTOM_TARGET _targetName) set(_options ALL) set(_oneValueArgs SCRIPT COMMENT SYSTEM_ID) set(_multiValueArgs CODE DEPENDS SOURCES KERNEL_OPTIONS) cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN}) if(_option_UNPARSED_ARGUMENTS) message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}") elseif (NOT _option_CODE AND NOT _option_SCRIPT) message (FATAL_ERROR "Either the keyword CODE or SCRIPT must be present.") endif() set (_cmd "${_targetName}") if (_option_ALL) list(APPEND _cmd "ALL") endif() list(APPEND _cmd COMMAND) _add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS) _add_script_or_code(_cmd _option_SCRIPT _option_CODE) if (_option_SCRIPT) list (APPEND _option_DEPENDS ${_option_SCRIPT}) endif() if (_option_DEPENDS) list (APPEND _cmd DEPENDS ${_option_DEPENDS}) endif() if (_option_COMMENT) list(APPEND _cmd COMMENT ${_option_COMMENT}) endif() if (_option_SOURCES) list(APPEND _cmd SOURCES ${_option_SOURCES}) endif() list (APPEND _cmd WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" VERBATIM) if (Mathematica_DEBUG) message (STATUS "add_custom_target: ${_cmd}") endif() add_custom_target(${_cmd}) endfunction(Mathematica_ADD_CUSTOM_TARGET) # public function 
for executing Mathematica code at build time to produce output files function (Mathematica_ADD_CUSTOM_COMMAND) set(_options PRE_BUILD PRE_LINK POST_BUILD APPEND) set(_oneValueArgs SCRIPT COMMENT MAIN_DEPENDENCY TARGET SYSTEM_ID) set(_multiValueArgs CODE OUTPUT DEPENDS KERNEL_OPTIONS) cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN}) if(_option_UNPARSED_ARGUMENTS) message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}") elseif (NOT _option_CODE AND NOT _option_SCRIPT) message (FATAL_ERROR "Either the keyword CODE or SCRIPT must be present.") elseif (NOT _option_OUTPUT AND NOT _option_TARGET) message (FATAL_ERROR "Either the keyword OUTPUT or TARGET must be present.") elseif (_option_OUTPUT AND _option_TARGET) message (FATAL_ERROR "Keywords OUTPUT and TARGET are mutually exclusive.") endif() if (_option_OUTPUT) set (_cmd OUTPUT ${_option_OUTPUT}) endif() if (_option_TARGET) set (_cmd TARGET ${_option_TARGET}) endif() if (_option_PRE_BUILD) list(APPEND _cmd PRE_BUILD) endif() if (_option_PRE_LINK) list(APPEND _cmd PRE_LINK) endif() if (_option_POST_BUILD) list(APPEND _cmd POST_BUILD) endif() list(APPEND _cmd COMMAND) _add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS) _add_script_or_code(_cmd _option_SCRIPT _option_CODE) if (_option_MAIN_DEPENDENCY) list(APPEND _cmd MAIN_DEPENDENCY ${_option_MAIN_DEPENDENCY}) endif() if (_option_SCRIPT AND _option_OUTPUT) list (APPEND _option_DEPENDS ${_option_SCRIPT}) endif() if (_option_DEPENDS) list(APPEND _cmd DEPENDS ${_option_DEPENDS}) endif() if (_option_COMMENT) list(APPEND _cmd COMMENT ${_option_COMMENT}) endif() list (APPEND _cmd WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" VERBATIM) if (_option_APPEND) list(APPEND _cmd APPEND) endif() if (Mathematica_DEBUG) message (STATUS "add_custom_command: ${_cmd}") endif() add_custom_command(${_cmd}) endfunction(Mathematica_ADD_CUSTOM_COMMAND) # public function to simplify testing Mathematica commands 
# public function to simplify testing Mathematica commands:
# registers a CTest test that runs Mathematica CODE, a SCRIPT file or an
# arbitrary COMMAND through the kernel launch helpers.
function (Mathematica_ADD_TEST)
	set(_options "")
	set(_oneValueArgs NAME SCRIPT INPUT INPUT_FILE SYSTEM_ID)
	set(_multiValueArgs CODE CONFIGURATIONS COMMAND KERNEL_OPTIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	elseif (NOT _option_NAME)
		message (FATAL_ERROR "Mandatory parameter NAME is missing.")
	elseif (NOT _option_CODE AND NOT _option_SCRIPT AND NOT _option_COMMAND)
		message (FATAL_ERROR "Either the keyword CODE, SCRIPT or COMMAND must be present.")
	endif()
	set (_cmd NAME "${_option_NAME}" COMMAND)
	_add_test_driver(_cmd "${_option_NAME}" _option_INPUT _option_INPUT_FILE)
	if (_option_COMMAND)
		# an explicit COMMAND bypasses the kernel; only the launch prefix applies
		_add_launch_prefix(_cmd _option_SYSTEM_ID)
		list (APPEND _cmd ${_option_COMMAND})
	else()
		_add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS)
		_add_script_or_code(_cmd _option_SCRIPT _option_CODE)
	endif()
	if (_option_CONFIGURATIONS)
		list (APPEND _cmd CONFIGURATIONS ${_option_CONFIGURATIONS})
	endif()
	if (Mathematica_DEBUG)
		message (STATUS "add_test: ${_cmd}")
	endif()
	add_test (${_cmd})
endfunction (Mathematica_ADD_TEST)

# public function to add target that runs Mathematica Encode function on input
# files.  Each input file gets exactly one output entry; a single existing
# directory given as OUTPUT is expanded to one entry per input file.
function (Mathematica_ENCODE)
	set(_options "CHECK_TIMESTAMPS")
	set(_oneValueArgs "COMMENT" "KEY" "MACHINE_ID")
	set(_multiValueArgs "OUTPUT")
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	set (_inputFiles ${_option_UNPARSED_ARGUMENTS})
	list (LENGTH _inputFiles _inputFileCount)
	if (_inputFileCount EQUAL 0)
		message (WARNING "No input files to encode given.")
		return()
	endif()
	if (_option_OUTPUT)
		set (_outputFiles ${_option_OUTPUT})
	else()
		# no output option given, write encoded files to CMAKE_CURRENT_BINARY_DIR
		set (_outputFiles ${CMAKE_CURRENT_BINARY_DIR})
	endif()
	list (LENGTH _outputFiles _outputFileCount)
	if (_outputFileCount EQUAL 1 AND _inputFileCount GREATER 1 AND
		IS_DIRECTORY "${_outputFiles}")
		# OUTPUT option is a single existing directory, write encoded files to it
		set (_outputDir "${_outputFiles}")
		math(EXPR _lastIndex "${_inputFileCount} - 2")
		foreach(_index RANGE ${_lastIndex})
			list (APPEND _outputFiles "${_outputDir}")
		endforeach()
		set (_outputFileCount ${_inputFileCount})
	endif()
	set (_outputFilesAbs "")
	set (_inputFilesAbs "")
	set (_outputDirs "")
	if (_outputFileCount EQUAL _inputFileCount)
		math(EXPR _lastIndex "${_inputFileCount} - 1")
		foreach(_index RANGE ${_lastIndex})
			list (GET _inputFiles ${_index} _inputFile)
			get_filename_component(_inputFileAbs "${_inputFile}" ABSOLUTE)
			list (APPEND _inputFilesAbs "${_inputFileAbs}")
			list (GET _outputFiles ${_index} _outputFile)
			if (IS_DIRECTORY "${_outputFile}")
				# mirror the source-relative layout inside the output directory
				# when the input lives under CMAKE_CURRENT_SOURCE_DIR
				file (RELATIVE_PATH _inputFileRel ${CMAKE_CURRENT_SOURCE_DIR} "${_inputFileAbs}")
				if (NOT IS_ABSOLUTE "${_inputFileRel}" AND NOT "${_inputFileRel}" MATCHES "^\\.\\.")
					set (_outputFile "${_outputFile}/${_inputFileRel}")
				else()
					get_filename_component(_inputFileName "${_inputFile}" NAME)
					set (_outputFile "${_outputFile}/${_inputFileName}")
				endif()
			endif()
			if (IS_ABSOLUTE "${_outputFile}")
				list (APPEND _outputFilesAbs "${_outputFile}")
			else()
				list (APPEND _outputFilesAbs "${CMAKE_CURRENT_BINARY_DIR}/${_outputFile}")
			endif()
			get_filename_component(_outputFileDir "${_outputFile}" DIRECTORY)
			if (NOT _outputFileDir STREQUAL "${CMAKE_CURRENT_BINARY_DIR}")
				list (APPEND _outputDirs "${_outputFileDir}")
			endif()
		endforeach()
	else()
		# OUTPUT option must have exactly one entry for each input file
		message (FATAL_ERROR "Number of output files (${_outputFileCount}) does not match number of input files (${_inputFileCount}).")
	endif()
	Mathematica_TO_NATIVE_PATH("${_inputFilesAbs}" _inputFilesAbsMma)
	Mathematica_TO_NATIVE_PATH("${_outputFilesAbs}" _outputFilesAbsMma)
	set (_cmdOptionsMma "")
	if (_option_KEY)
		Mathematica_TO_NATIVE_STRING("${_option_KEY}" _keyMma)
		set (_cmdOptionsMma "${_cmdOptionsMma},${_keyMma}")
	endif()
	if (_option_MACHINE_ID)
		Mathematica_TO_NATIVE_STRING("${_option_MACHINE_ID}" _machineIDMma)
		set (_cmdOptionsMma "${_cmdOptionsMma},MachineID->${_machineIDMma}")
	endif()
	if (_option_CHECK_TIMESTAMPS)
		# only re-encode when the output is absent or older than the input
		set (_encodeFunc "If[FileType[#2]==None||OrderedQ[{FileDate[#2],FileDate[#1]}],Encode[#1,#2${_cmdOptionsMma}]]&")
	else()
		set (_encodeFunc "Encode[#1,#2${_cmdOptionsMma}]&")
	endif()
	if (_inputFileCount EQUAL 1)
		set (_func "Apply")
	else()
		set (_func "MapThread")
	endif()
	set (_cmds "")
	if (_outputDirs)
		list (SORT _outputDirs)
		list (REMOVE_DUPLICATES _outputDirs)
		Mathematica_TO_NATIVE_PATH("${_outputDirs}" _outputDirsMma)
		list (APPEND _cmds "Quiet[CreateDirectory[${_outputDirsMma}]]")
	endif()
	list (APPEND _cmds "${_func}[${_encodeFunc},{${_inputFilesAbsMma},${_outputFilesAbsMma}}]")
	if (NOT _option_COMMENT)
		if (_inputFileCount EQUAL 1)
			set (_option_COMMENT "Encoding ${_inputFiles}")
		else()
			set (_option_COMMENT "Encoding ${_inputFileCount} Mathematica files")
		endif()
	endif()
	Mathematica_ADD_CUSTOM_COMMAND(
		CODE ${_cmds}
		OUTPUT ${_outputFilesAbs}
		DEPENDS ${_inputFilesAbs}
		COMMENT "${_option_COMMENT}")
	set_source_files_properties(${_outputFilesAbs} PROPERTIES GENERATED TRUE LABELS "Mathematica")
endfunction(Mathematica_ENCODE)

# public function to find Mathematica package; stores the package file path
# in the cache variable named by _var (or <var>-NOTFOUND).
function (Mathematica_FIND_PACKAGE _var _packageName)
	set(_options "")
	set(_oneValueArgs DOC SYSTEM_ID)
	set(_multiValueArgs KERNEL_OPTIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	endif()
	# determine MUnit package directory
	Mathematica_TO_NATIVE_STRING("${_packageName}" _packageNameMma)
	# default to using FileNames function
	set (_findPackage "Print[StandardForm[Check[First[FileNames[ContextToFileName[${_packageNameMma}],$Path]],$Failed]]]")
	if (DEFINED Mathematica_VERSION)
		if (NOT "${Mathematica_VERSION}" VERSION_LESS "7.0")
			# function FindFile available since Mathematica 7
			set (_findPackage "Print[StandardForm[FindFile[${_packageNameMma}]]]")
		endif()
	endif()
	if (NOT _option_DOC)
		set (_option_DOC "Mathematica package file path.")
	endif()
	set (_cmd CODE "${_findPackage}" OUTPUT_VARIABLE ${_var} CACHE DOC "${_option_DOC}" TIMEOUT 10)
	# BUG FIX: this previously tested and forwarded _option_KERNEL_FLAGS, a
	# variable cmake_parse_arguments never defines (the multi-value keyword
	# declared above is KERNEL_OPTIONS), so user-supplied KERNEL_OPTIONS were
	# silently ignored.
	if (_option_KERNEL_OPTIONS)
		list (APPEND _cmd KERNEL_OPTIONS ${_option_KERNEL_OPTIONS})
	endif()
	if (_option_SYSTEM_ID)
		list (APPEND _cmd SYSTEM_ID ${_option_SYSTEM_ID})
	endif()
	# if package file variable already defined, verify package file existence
	if (DEFINED ${_var})
		if (NOT EXISTS "${${_var}}")
			unset(${_var} CACHE)
			unset(${_var})
		endif()
	endif()
	Mathematica_EXECUTE(${_cmd})
	# verify package file existence
	if (DEFINED ${_var})
		if (EXISTS "${${_var}}")
			_to_cmake_path("${${_var}}" ${_var})
		else()
			set (${_var} "${_var}-NOTFOUND")
		endif()
	else()
		set (${_var} "${_var}-NOTFOUND")
	endif()
	set (${_var} "${${_var}}" CACHE FILEPATH "${_option_DOC}" FORCE)
	set (${_var} "${${_var}}" PARENT_SCOPE)
endfunction()

# public function to get root Mathematica package directory from a package file
function (Mathematica_GET_PACKAGE_DIR _var _packageFile)
	_get_supported_systemIDs("${Mathematica_VERSION}" _intermediateDirs)
	list (APPEND _intermediateDirs "Kernel" "SystemResources" "SystemFiles" "Binaries" "Libraries" "LibraryResources" "Java" "CSource")
	if (NOT EXISTS "${_packageFile}")
		set (${_var} "${_var}-NOTFOUND" PARENT_SCOPE)
		return()
	endif()
	# walk up directory tree until we find package root dir
	set (_packageFileDir "${_packageFile}")
	set (_index 0)
	while (NOT ${_index} EQUAL -1)
		get_filename_component(_packageFileDir "${_packageFileDir}" DIRECTORY)
		get_filename_component(_name "${_packageFileDir}" NAME)
		list (FIND _intermediateDirs "${_name}" _index)
	endwhile()
	set (${_var} ${_packageFileDir} PARENT_SCOPE)
endfunction()

endif (Mathematica_KERNEL_EXECUTABLE)

# re-compute system IDs and base directories, now that we can query the kernel
_setup_mathematica_systemIDs()
_setup_mathematica_creationID()
_setup_mathematica_base_directory()
_setup_mathematica_userbase_directory()
# find Mathematica components
_find_components()
_setup_mathematica_version_variables()
_update_cache()
_setup_found_variables()
_log_found_variables()

# public function for fixing shared library references to dynamic Mathematica
# runtime libraries under Mac OS X: rewrites each target's recorded install
# names to the absolute library paths via install_name_tool.
function (Mathematica_ABSOLUTIZE_LIBRARY_DEPENDENCIES)
	if (APPLE)
		foreach(_target ${ARGV})
			get_target_property(_targetType ${_target} TYPE)
			if (_targetType MATCHES "MODULE_LIBRARY|SHARED_LIBRARY|EXECUTABLE")
				foreach(_library Mathematica_WolframLibrary_LIBRARY Mathematica_MathLink_LIBRARY Mathematica_WSTP_LIBRARY)
					if (DEFINED ${_library})
						_get_install_name("${${_library}}" _libraryInstallName _libraryAbsPath)
						if (_libraryInstallName)
							# NOTE(review): the bare "$" argument looks like an
							# extraction-mangled generator expression (probably
							# $<TARGET_FILE:...>) — verify against upstream.
							add_custom_command (TARGET ${_target} POST_BUILD
								COMMAND "${CMAKE_INSTALL_NAME_TOOL}" "-change" "${_libraryInstallName}" "${_libraryAbsPath}" "$" VERBATIM)
						endif()
					endif()
				endforeach()
			endif()
		endforeach()
	endif()
endfunction()

if (Mathematica_KERNEL_EXECUTABLE AND Mathematica_MathLink_FOUND)

# public function to simplify testing MathLink programs.  LINK_MODE selects
# between running the kernel and Install-ing the target (ParentConnect) or
# running the target itself as a front end to the kernel (Launch).
function (Mathematica_MathLink_ADD_TEST)
	set(_options "")
	set(_oneValueArgs NAME SCRIPT TARGET INPUT INPUT_FILE SYSTEM_ID LINK_PROTOCOL LINK_MODE)
	set(_multiValueArgs CODE CONFIGURATIONS KERNEL_OPTIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	elseif (NOT _option_TARGET)
		message (FATAL_ERROR "Mandatory parameter TARGET is missing.")
	elseif (NOT _option_NAME)
		message (FATAL_ERROR "Mandatory parameter NAME is missing.")
	endif()
	if (NOT _option_LINK_MODE)
		# presence of CODE/SCRIPT implies the kernel drives the test
		if (_option_CODE OR _option_SCRIPT)
			set (_option_LINK_MODE "ParentConnect")
		else()
			set (_option_LINK_MODE "Launch")
		endif()
	endif()
	set (_cmd NAME "${_option_NAME}" COMMAND)
	_add_test_driver(_cmd "${_option_NAME}" _option_INPUT _option_INPUT_FILE)
	if (_option_LINK_MODE MATCHES "^ParentConnect$")
		# run Mathematica kernel and launch MathLink executable as a child process that connects with ParentConnect
		if (CYGWIN OR MSYS)
			get_target_property (_targetFile ${_option_TARGET} LOCATION)
			Mathematica_TO_NATIVE_PATH("${_targetFile}" _installCmdMma)
		else()
			# NOTE(review): "$" looks like a mangled $<TARGET_FILE:...> generator
			# expression — confirm against the upstream FindMathematica module.
			set (_installCmdMma "\"$\"")
		endif()
		set (_launch_prefix "")
		_add_launch_prefix(_launch_prefix _option_SYSTEM_ID)
		if (_launch_prefix)
			Mathematica_TO_NATIVE_LIST(_launch_prefixMma ${_launch_prefix})
			set (_installCmdMma "StringJoin[StringInsert[${_launch_prefixMma},\" \",-1],StringInsert[${_installCmdMma},\"\\\"\",{1,-1}]]" )
		endif()
		if (_option_LINK_PROTOCOL)
			set (_installCmd "link=Install[${_installCmdMma},LinkProtocol->\"${_option_LINK_PROTOCOL}\"]")
		else()
			set (_installCmd "link=Install[${_installCmdMma}]")
		endif()
		if (_option_CODE)
			list (APPEND _installCmd ${_option_CODE})
		endif()
		if (NOT _option_SCRIPT)
			list (APPEND _installCmd "Uninstall[link]")
		endif()
		_add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS)
		_add_script_or_code(_cmd _option_SCRIPT _installCmd)
	elseif (_option_LINK_MODE MATCHES "^Launch$")
		# run MathLink executable as front-end to Mathematica kernel
		_add_launch_prefix(_cmd _option_SYSTEM_ID)
		# NOTE(review): "$" likely a mangled $<TARGET_FILE:...> — verify.
		list (APPEND _cmd "$")
		_add_linkmode_launch_code(_cmd "-mathlink" _option_SYSTEM_ID _option_KERNEL_OPTIONS _option_LINK_PROTOCOL _option_SCRIPT _option_CODE)
	else()
		message (FATAL_ERROR "Parameter LINK_MODE must be either \"Launch\" or \"ParentConnect\".")
	endif()
	if (_option_CONFIGURATIONS)
		list (APPEND _cmd CONFIGURATIONS ${_option_CONFIGURATIONS})
	endif()
	if (Mathematica_DEBUG)
		message (STATUS "add_test: ${_cmd}")
	endif()
	add_test (${_cmd})
endfunction(Mathematica_MathLink_ADD_TEST)

endif (Mathematica_KERNEL_EXECUTABLE AND Mathematica_MathLink_FOUND)

if (Mathematica_KERNEL_EXECUTABLE AND Mathematica_WSTP_FOUND)

# public function to simplify testing WSTP programs (WSTP analogue of
# Mathematica_MathLink_ADD_TEST; mode is inferred from CODE/SCRIPT presence).
function (Mathematica_WSTP_ADD_TEST)
	set(_options "")
	set(_oneValueArgs NAME SCRIPT TARGET INPUT INPUT_FILE SYSTEM_ID LINK_PROTOCOL)
	set(_multiValueArgs CODE CONFIGURATIONS KERNEL_OPTIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	elseif (NOT _option_TARGET)
		message (FATAL_ERROR "Mandatory parameter TARGET is missing.")
	elseif (NOT _option_NAME)
		message (FATAL_ERROR "Mandatory parameter NAME is missing.")
	endif()
	set (_cmd NAME "${_option_NAME}" COMMAND)
	_add_test_driver(_cmd "${_option_NAME}" _option_INPUT _option_INPUT_FILE)
	if (_option_CODE OR _option_SCRIPT)
		# run Mathematica kernel and install WSTP executable
		if (CYGWIN OR MSYS)
			get_target_property (_targetFile ${_option_TARGET} LOCATION)
			Mathematica_TO_NATIVE_PATH("${_targetFile}" _installCmdMma)
		else()
			# NOTE(review): "$" likely a mangled $<TARGET_FILE:...> — verify.
			set (_installCmdMma "\"$\"")
		endif()
		set (_launch_prefix "")
		_add_launch_prefix(_launch_prefix _option_SYSTEM_ID)
		if (_launch_prefix)
			Mathematica_TO_NATIVE_LIST(_launch_prefixMma ${_launch_prefix})
			set (_installCmdMma "StringJoin[StringInsert[${_launch_prefixMma},\" \",-1],StringInsert[${_installCmdMma},\"\\\"\",{1,-1}]]" )
		endif()
		if (_option_LINK_PROTOCOL)
			set (_installCmd "link=Install[${_installCmdMma},LinkProtocol->\"${_option_LINK_PROTOCOL}\"]")
		else()
			set (_installCmd "link=Install[${_installCmdMma}]")
		endif()
		if (_option_CODE)
			list (APPEND _installCmd ${_option_CODE})
		endif()
		if (NOT _option_SCRIPT)
			list (APPEND _installCmd "Uninstall[link]")
		endif()
		_add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS)
		_add_script_or_code(_cmd _option_SCRIPT _installCmd)
	else()
		# run WSTP executable as front-end to Mathematica kernel
		_add_launch_prefix(_cmd _option_SYSTEM_ID)
		# NOTE(review): "$" likely a mangled $<TARGET_FILE:...> — verify.
		list (APPEND _cmd "$")
		_add_linkmode_launch_code(_cmd "-wstp" _option_SYSTEM_ID _option_KERNEL_OPTIONS _option_LINK_PROTOCOL _option_SCRIPT _option_CODE)
	endif()
	if (_option_CONFIGURATIONS)
		list (APPEND _cmd CONFIGURATIONS ${_option_CONFIGURATIONS})
	endif()
	if (Mathematica_DEBUG)
		message (STATUS "add_test: ${_cmd}")
	endif()
	add_test (${_cmd})
endfunction(Mathematica_WSTP_ADD_TEST)

endif (Mathematica_KERNEL_EXECUTABLE AND Mathematica_WSTP_FOUND)

if (Mathematica_KERNEL_EXECUTABLE AND Mathematica_WolframLibrary_FOUND)

# public function to add target that creates C code from Mathematica code
# using the CCodeGenerator package (WolframRTL source plus matching header).
function (Mathematica_GENERATE_C_CODE _packageFile)
	get_filename_component(_packageFileBaseName ${_packageFile} NAME_WE)
	get_filename_component(_packageFileName ${_packageFile} NAME)
	get_filename_component(_packageFileAbs ${_packageFile} ABSOLUTE)
	set(_options "")
	set(_oneValueArgs "OUTPUT")
	set(_multiValueArgs "DEPENDS")
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	endif()
	if (_option_OUTPUT)
		set (_cSource "${_option_OUTPUT}")
		get_filename_component(_cHeaderBaseName ${_cSource} NAME_WE)
		set (_cHeader "${_cHeaderBaseName}.h")
	else()
		set (_cSource "${_packageFileName}.c")
		set (_cHeader "${_packageFileName}.h")
		set (_cHeaderBaseName "${_packageFileName}")
	endif()
	Mathematica_TO_NATIVE_PATH(${_packageFileAbs} _packageFileAbsMma)
	Mathematica_TO_NATIVE_PATH(${_cSource} _cSourceMma)
	Mathematica_TO_NATIVE_PATH(${_cHeader} _cHeaderMma)
	Mathematica_TO_NATIVE_STRING(${_cHeaderBaseName} _cHeaderBaseNameMma)
	Mathematica_TO_NATIVE_STRING(${_packageFileBaseName} _packageFileBaseNameMma)
	# the package file is expected to evaluate to a list of compiled functions
	string (REGEX REPLACE "\n|\t" "" _codeGenerate "Module[{functions=Get[${_packageFileAbsMma}]}, If[ListQ[functions], CompoundExpression[ CCodeGenerate[Sequence@@functions,${_cSourceMma}, \"CodeTarget\"->\"WolframRTL\", \"HeaderName\"->${_cHeaderBaseNameMma}, \"LifeCycleFunctionNames\"->${_packageFileBaseNameMma}], CCodeGenerate[Sequence@@functions,${_cHeaderMma}, \"CodeTarget\"->\"WolframRTLHeader\", \"LifeCycleFunctionNames\"->${_packageFileBaseNameMma}] ] ] ]")
	list (INSERT _codeGenerate 0 "Needs[\"CCodeGenerator`\"]")
	set (_msg "Generating source ${_cSource} and header ${_cHeader} from ${_packageFile}")
	list (INSERT _option_DEPENDS 0 "${_packageFileAbs}")
	Mathematica_ADD_CUSTOM_COMMAND(
		OUTPUT "${_cSource}" "${_cHeader}"
		CODE ${_codeGenerate}
		DEPENDS ${_option_DEPENDS}
		COMMENT "${_msg}")
	set_source_files_properties("${_cSource}" "${_cHeader}" PROPERTIES GENERATED TRUE LABELS "Mathematica")
endfunction(Mathematica_GENERATE_C_CODE)

# public function to simplify testing WolframLibrary targets: the kernel
# loads the library, runs the given CODE/SCRIPT and unloads it again.
function (Mathematica_WolframLibrary_ADD_TEST)
	set(_options "")
	set(_oneValueArgs NAME SCRIPT TARGET INPUT INPUT_FILE SYSTEM_ID)
	set(_multiValueArgs CODE CONFIGURATIONS KERNEL_OPTIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	elseif (NOT _option_TARGET)
		message (FATAL_ERROR "Mandatory parameter TARGET is missing.")
	elseif (NOT _option_NAME)
		message (FATAL_ERROR "Mandatory parameter NAME is missing.")
	elseif (NOT _option_CODE AND NOT _option_SCRIPT)
		message (FATAL_ERROR "Either the keyword CODE or SCRIPT must be present.")
	endif()
	set (_cmd NAME "${_option_NAME}" COMMAND)
	_add_test_driver(_cmd "${_option_NAME}" _option_INPUT _option_INPUT_FILE)
	# run Mathematica kernel and load Wolfram library
	if (CYGWIN OR MSYS)
		get_target_property (_targetFile ${_option_TARGET} LOCATION)
		Mathematica_TO_NATIVE_PATH("${_targetFile}" _targetFileMma)
	else()
		# NOTE(review): "$" likely a mangled $<TARGET_FILE:...> — verify.
		set (_targetFileMma "\"$\"")
	endif()
	set (_installCmd
		"libPath = ${_targetFileMma}"
		"LibraryLoad[libPath]"
		"Print[LibraryLink`$LibraryError]" )
	if (_option_CODE)
		list (APPEND _installCmd ${_option_CODE})
	endif()
	if (NOT _option_SCRIPT)
		list (APPEND _installCmd "LibraryUnload[libPath]")
	endif()
	_add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS)
	_add_script_or_code(_cmd _option_SCRIPT _installCmd)
	if (_option_CONFIGURATIONS)
		list (APPEND _cmd CONFIGURATIONS ${_option_CONFIGURATIONS})
	endif()
	if (Mathematica_DEBUG)
		message (STATUS "add_test: ${_cmd}")
	endif()
	add_test (${_cmd})
endfunction(Mathematica_WolframLibrary_ADD_TEST)

endif (Mathematica_KERNEL_EXECUTABLE AND Mathematica_WolframLibrary_FOUND)

if (Mathematica_WolframLibrary_FOUND)

# public function that sets dynamic library names according to LibraryLink
# naming conventions.  Trailing "PROPERTIES ..." arguments are passed through
# to set_target_properties unchanged.
function (Mathematica_WolframLibrary_SET_PROPERTIES)
	set (_haveProperties False)
	foreach (_libraryName ${ARGV})
		if ("${_libraryName}" STREQUAL "PROPERTIES")
			set (_haveProperties True)
			break()
		endif()
		set_target_properties (${_libraryName} PROPERTIES PREFIX "")
		if (WIN32 OR CYGWIN)
			set_target_properties (${_libraryName} PROPERTIES SUFFIX ".dll")
		elseif (APPLE)
			set_target_properties (${_libraryName} PROPERTIES SUFFIX ".dylib")
		elseif (UNIX)
			set_target_properties (${_libraryName} PROPERTIES SUFFIX ".so")
		endif()
		set_target_properties (${_libraryName} PROPERTIES LABELS "Mathematica")
		if (CYGWIN AND CMAKE_COMPILER_IS_GNUCC)
			# Mathematica kernel cannot load Cygwin generated libraries linked with Cygwin runtime DLL
			# a work-around is to use the -mno-cygwin flag, which is only supported by gcc 3.x, not by gcc 4.x
			if (NOT "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "3.0.0" AND "${CMAKE_C_COMPILER_VERSION}" VERSION_LESS "4.0.0")
				set_target_properties (${_libraryName} PROPERTIES COMPILE_OPTIONS "-mno-cygwin")
				set_target_properties (${_libraryName} PROPERTIES LINK_FLAGS "-mno-cygwin")
			endif()
		endif()
	endforeach()
	if (_haveProperties)
		set_target_properties (${ARGV})
	endif()
endfunction(Mathematica_WolframLibrary_SET_PROPERTIES)

# public function for creating dynamic library loadable with LibraryLink
function (Mathematica_ADD_LIBRARY _libraryName)
	add_library (${_libraryName} MODULE ${ARGN})
	Mathematica_WolframLibrary_SET_PROPERTIES(${_libraryName})
endfunction()

endif (Mathematica_WolframLibrary_FOUND)

if (Mathematica_MathLink_MPREP_EXECUTABLE)
# public function for creating source file from template file
# (continued) ... using mprep: runs mprep on a .tm template and registers the
# generated C source as a custom-command output.
function (Mathematica_MathLink_MPREP_TARGET _templateFile)
	get_filename_component(_templateFileName ${_templateFile} NAME)
	get_filename_component(_templateFileAbs ${_templateFile} ABSOLUTE)
	set(_options LINE_DIRECTIVES)
	set(_oneValueArgs OUTPUT CUSTOM_HEADER CUSTOM_TRAILER)
	set(_multiValueArgs "")
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	endif()
	if (_option_OUTPUT)
		set (_outfile ${_option_OUTPUT})
	else()
		_get_mprep_output_file("${_templateFile}" _outfile)
	endif()
	_to_native_path ("${Mathematica_MathLink_MPREP_EXECUTABLE}" _mprepExeNative)
	_to_native_path ("${_outfile}" _outfileNative)
	set (_command "${_mprepExeNative}" "-o" "${_outfileNative}")
	# re-run mprep when the tool itself or the custom frames change
	set (_dependencies "${Mathematica_MathLink_MPREP_EXECUTABLE}")
	if (_option_CUSTOM_HEADER)
		_to_native_path ("${_option_CUSTOM_HEADER}" _customHeaderNative)
		list (APPEND _command "-h" "${_customHeaderNative}")
		list (APPEND _dependencies "${_option_CUSTOM_HEADER}")
	endif()
	if (_option_CUSTOM_TRAILER)
		_to_native_path ("${_option_CUSTOM_TRAILER}" _customTrailerNative)
		list (APPEND _command "-t" "${_customTrailerNative}")
		list (APPEND _dependencies "${_option_CUSTOM_TRAILER}")
	endif()
	if (_option_LINE_DIRECTIVES)
		list (APPEND _command "-lines")
	else()
		list (APPEND _command "-nolines")
	endif()
	if (CYGWIN)
		# under Cygwin invoke mprep.exe with template file argument specified as
		# a relative path because it cannot handle absolute Cygwin UNIX paths
		file (RELATIVE_PATH _templateFileRel ${CMAKE_CURRENT_BINARY_DIR} ${_templateFileAbs})
		list (APPEND _command "${_templateFileRel}")
	else()
		_to_native_path ("${_templateFileAbs}" _templateFileAbsNative)
		list (APPEND _command "${_templateFileAbsNative}")
	endif()
	set (_msg "Generating MathLink source ${_outfile} from ${_templateFileName}")
	add_custom_command(
		OUTPUT ${_outfile}
		COMMAND ${_command}
		MAIN_DEPENDENCY ${_templateFileAbs}
		DEPENDS ${_dependencies}
		WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
		COMMENT ${_msg} VERBATIM)
	set_source_files_properties(${_outfile} PROPERTIES GENERATED TRUE LABELS "Mathematica")
endfunction(Mathematica_MathLink_MPREP_TARGET)

# public function for creating MathLink executable from template file and source files
function (Mathematica_MathLink_ADD_EXECUTABLE _executableName _templateFile)
	_get_mprep_output_file(${_templateFile} _outfile)
	Mathematica_MathLink_MPREP_TARGET(${_templateFile} OUTPUT ${_outfile})
	add_executable (${_executableName} WIN32 ${_outfile} ${ARGN})
	target_link_libraries(${_executableName} PRIVATE ${Mathematica_MathLink_LIBRARIES})
	if (Mathematica_MathLink_LINKER_FLAGS)
		set_target_properties(${_executableName} PROPERTIES LINK_FLAGS "${Mathematica_MathLink_LINKER_FLAGS}")
	endif()
	set_target_properties (${_executableName} PROPERTIES LABELS "Mathematica")
endfunction()

# public function for exporting standard mprep header and trailer code:
# captures mprep's frame output and writes it to per-system-ID text files.
function (Mathematica_MathLink_MPREP_EXPORT_FRAMES)
	set(_options FORCE)
	set(_oneValueArgs OUTPUT_DIRECTORY SYSTEM_ID)
	set(_multiValueArgs "")
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if (NOT _option_OUTPUT_DIRECTORY)
		set (_option_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
	endif()
	if (NOT _option_SYSTEM_ID)
		set (_option_SYSTEM_ID "${Mathematica_HOST_SYSTEM_ID}")
	endif()
	set (_headerFileName "${_option_OUTPUT_DIRECTORY}/mprep_header_${_option_SYSTEM_ID}.txt")
	set (_trailerFileName "${_option_OUTPUT_DIRECTORY}/mprep_trailer_${_option_SYSTEM_ID}.txt")
	if (NOT _option_FORCE AND EXISTS "${_headerFileName}" AND EXISTS "${_trailerFileName}")
		message (STATUS "Mprep header file mprep_header_${_option_SYSTEM_ID}.txt already exists")
		message (STATUS "Mprep trailer file mprep_trailer_${_option_SYSTEM_ID}.txt already exists")
		return()
	endif()
	# feed mprep an empty input so it just emits its frame code
	if (WIN32)
		set (_input_file "NUL")
	else()
		set (_input_file "/dev/null")
	endif()
	execute_process(
		COMMAND "${Mathematica_MathLink_MPREP_EXECUTABLE}"
		WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
		INPUT_FILE "${_input_file}"
		OUTPUT_VARIABLE _mprep_frame
		OUTPUT_STRIP_TRAILING_WHITESPACE)
	# prevent CMake from interpreting ; as a list separator
	string (REPLACE ";" "\\;" _mprep_frame "${_mprep_frame}")
	string (REPLACE "\n" ";" _mprep_frame "${_mprep_frame}")
	set (_header "")
	set (_trailer "")
	# state machine: collect lines into _header until "end header",
	# then into _trailer after "begin trailer"
	foreach (_line IN LISTS _mprep_frame)
		if ("${_line}" MATCHES "MPREP_REVISION ([0-9]+)")
			set (_mprep_revision "${CMAKE_MATCH_1}")
			set (_appendToVar _header)
		elseif ("${_line}" MATCHES "/.*end header.*/")
			unset (_appendToVar)
		elseif ("${_line}" MATCHES "/.*begin trailer.*/")
			set (_appendToVar _trailer)
		elseif (DEFINED _appendToVar)
			set (${_appendToVar} "${${_appendToVar}}${_line}\n")
		endif()
	endforeach()
	if ("${_header}" MATCHES ".+")
		message (STATUS "Mprep header revision ${_mprep_revision} exported to ${_headerFileName}")
		file (WRITE "${_headerFileName}" "${_header}")
	endif()
	if ("${_trailer}" MATCHES ".+")
		message (STATUS "Mprep trailer revision ${_mprep_revision} exported to ${_trailerFileName}")
		file (WRITE "${_trailerFileName}" "${_trailer}")
	endif()
endfunction(Mathematica_MathLink_MPREP_EXPORT_FRAMES)

endif (Mathematica_MathLink_MPREP_EXECUTABLE)

if (Mathematica_WSTP_WSPREP_EXECUTABLE)

# public function for creating source file from template file using mprep
# (WSTP analogue of Mathematica_MathLink_MPREP_TARGET, driven by wsprep).
function (Mathematica_WSTP_WSPREP_TARGET _templateFile)
	get_filename_component(_templateFileName ${_templateFile} NAME)
	get_filename_component(_templateFileAbs ${_templateFile} ABSOLUTE)
	set(_options LINE_DIRECTIVES)
	set(_oneValueArgs OUTPUT CUSTOM_HEADER CUSTOM_TRAILER)
	set(_multiValueArgs "")
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	endif()
	if (_option_OUTPUT)
		set (_outfile ${_option_OUTPUT})
	else()
		_get_mprep_output_file("${_templateFile}" _outfile)
	endif()
	_to_native_path ("${Mathematica_WSTP_WSPREP_EXECUTABLE}" _mprepExeNative)
	_to_native_path ("${_outfile}" _outfileNative)
	set (_command "${_mprepExeNative}" "-o" "${_outfileNative}")
	set (_dependencies "${Mathematica_WSTP_WSPREP_EXECUTABLE}")
	if (_option_CUSTOM_HEADER)
		_to_native_path ("${_option_CUSTOM_HEADER}" _customHeaderNative)
		list (APPEND _command "-h" "${_customHeaderNative}")
		list (APPEND _dependencies "${_option_CUSTOM_HEADER}")
	endif()
	if (_option_CUSTOM_TRAILER)
		_to_native_path ("${_option_CUSTOM_TRAILER}" _customTrailerNative)
		list (APPEND _command "-t" "${_customTrailerNative}")
		list (APPEND _dependencies "${_option_CUSTOM_TRAILER}")
	endif()
	if (_option_LINE_DIRECTIVES)
		list (APPEND _command "-lines")
	else()
		list (APPEND _command "-nolines")
	endif()
	if (CYGWIN)
		# under Cygwin invoke mprep.exe with template file argument specified as
		# a relative path because it cannot handle absolute Cygwin UNIX paths
		file (RELATIVE_PATH _templateFileRel ${CMAKE_CURRENT_BINARY_DIR} ${_templateFileAbs})
		list (APPEND _command "${_templateFileRel}")
	else()
		_to_native_path ("${_templateFileAbs}" _templateFileAbsNative)
		list (APPEND _command "${_templateFileAbsNative}")
	endif()
	set (_msg "Generating WSTP source ${_outfile} from ${_templateFileName}")
	add_custom_command(
		OUTPUT ${_outfile}
		COMMAND ${_command}
		MAIN_DEPENDENCY ${_templateFileAbs}
		DEPENDS ${_dependencies}
		WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
		COMMENT ${_msg} VERBATIM)
	set_source_files_properties(${_outfile} PROPERTIES GENERATED TRUE LABELS "Mathematica")
endfunction(Mathematica_WSTP_WSPREP_TARGET)

# public function for creating WSTP executable from template file and source files
function (Mathematica_WSTP_ADD_EXECUTABLE _executableName _templateFile)
	_get_mprep_output_file("${_templateFile}" _outfile)
	Mathematica_WSTP_WSPREP_TARGET(${_templateFile} OUTPUT ${_outfile})
	add_executable (${_executableName} WIN32 ${_outfile} ${ARGN})
	target_link_libraries(${_executableName} PRIVATE ${Mathematica_WSTP_LIBRARIES})
	if (Mathematica_WSTP_LINKER_FLAGS)
		set_target_properties(${_executableName} PROPERTIES LINK_FLAGS "${Mathematica_WSTP_LINKER_FLAGS}")
	endif()
	set_target_properties (${_executableName} PROPERTIES LABELS "Mathematica")
endfunction()

# public function for exporting standard mprep header and trailer code
# (WSTP analogue of Mathematica_MathLink_MPREP_EXPORT_FRAMES).
function (Mathematica_WSTP_WSPREP_EXPORT_FRAMES)
	set(_options FORCE)
	set(_oneValueArgs OUTPUT_DIRECTORY SYSTEM_ID)
	set(_multiValueArgs "")
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if (NOT _option_OUTPUT_DIRECTORY)
		set (_option_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
	endif()
	if (NOT _option_SYSTEM_ID)
		set (_option_SYSTEM_ID "${Mathematica_HOST_SYSTEM_ID}")
	endif()
	set (_headerFileName "${_option_OUTPUT_DIRECTORY}/wsprep_header_${_option_SYSTEM_ID}.txt")
	set (_trailerFileName "${_option_OUTPUT_DIRECTORY}/wsprep_trailer_${_option_SYSTEM_ID}.txt")
	if (NOT _option_FORCE AND EXISTS "${_headerFileName}" AND EXISTS "${_trailerFileName}")
		message (STATUS "wsprep header file wsprep_header_${_option_SYSTEM_ID}.txt already exists")
		message (STATUS "wsprep trailer file wsprep_trailer_${_option_SYSTEM_ID}.txt already exists")
		return()
	endif()
	if (WIN32)
		set (_input_file "NUL")
	else()
		set (_input_file "/dev/null")
	endif()
	execute_process(
		COMMAND "${Mathematica_WSTP_WSPREP_EXECUTABLE}"
		WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
		INPUT_FILE "${_input_file}"
		OUTPUT_VARIABLE _wsprep_frame
		OUTPUT_STRIP_TRAILING_WHITESPACE)
	# prevent CMake from interpreting ; as a list separator
	string (REPLACE ";" "\\;" _wsprep_frame "${_wsprep_frame}")
	string (REPLACE "\n" ";" _wsprep_frame "${_wsprep_frame}")
	set (_header "")
	set (_trailer "")
	foreach (_line IN LISTS _wsprep_frame)
		if ("${_line}" MATCHES "PREP_REVISION ([0-9]+)")
			set (_wsprep_revision "${CMAKE_MATCH_1}")
			set (_appendToVar _header)
		elseif ("${_line}" MATCHES "/.*end header.*/")
			unset (_appendToVar)
		elseif ("${_line}" MATCHES "/.*begin trailer.*/")
			set (_appendToVar _trailer)
		elseif (DEFINED _appendToVar)
			set (${_appendToVar} "${${_appendToVar}}${_line}\n")
		endif()
	endforeach()
	if ("${_header}" MATCHES ".+")
		message (STATUS "wsprep header revision ${_wsprep_revision} exported to ${_headerFileName}")
		file (WRITE "${_headerFileName}" "${_header}")
	endif()
	if ("${_trailer}" MATCHES ".+")
		message (STATUS "wsprep trailer revision ${_wsprep_revision} exported to ${_trailerFileName}")
		file (WRITE "${_trailerFileName}" "${_trailer}")
	endif()
endfunction(Mathematica_WSTP_WSPREP_EXPORT_FRAMES)

endif (Mathematica_WSTP_WSPREP_EXECUTABLE)

if (Mathematica_MUnit_FOUND)

# public function for resolving a TestSuite declaration in a Mathematica unit
# test file: expands TestSuite[{...}] entries to the referenced test file
# paths, or returns the file path itself when it is not a suite file.
function (Mathematica_MUnit_RESOLVE_SUITE _var)
	set(_options "")
	set(_oneValueArgs RELATIVE)
	set(_multiValueArgs "")
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	set (${_var} "")
	foreach (_testSuiteFile IN LISTS _option_UNPARSED_ARGUMENTS)
		# parse test file names from TestSuite[{ ... }]
		file (STRINGS "${_testSuiteFile}" _testSuite NEWLINE_CONSUME)
		if ("${_testSuite}" MATCHES "TestSuite\\[")
			string (REPLACE "\n" "" _testSuite "${_testSuite}")
			string (REGEX REPLACE ".*TestSuite\\[.*{(.*)}.*\\].*" "\\1" _testSuite "${_testSuite}")
			string (REPLACE "," ";" _testSuite "${_testSuite}")
			get_filename_component(_testSuiteDir "${_testSuiteFile}" DIRECTORY)
			foreach (_testSuiteItem IN LISTS _testSuite)
				# parse quoted test file name
				string (REGEX REPLACE "[^\"]*\"(.*)\"[^\"]*" "\\1" _testSuiteItem "${_testSuiteItem}")
				_to_cmake_path("${_testSuiteDir}/${_testSuiteItem}" _testFile)
				if (_option_RELATIVE)
					file (RELATIVE_PATH _testFile "${_option_RELATIVE}" "${_testFile}")
				endif()
				list (APPEND ${_var} "${_testFile}")
			endforeach()
		else()
			# not a test suite file, return test suite file path itself
			get_filename_component(_testSuiteFile "${_testSuiteFile}" ABSOLUTE)
			if (_option_RELATIVE)
				file (RELATIVE_PATH _testSuiteFile "${_option_RELATIVE}" "${_testSuiteFile}")
			endif()
			list (APPEND ${_var} "${_testSuiteFile}")
		endif()
	endforeach()
	list (REMOVE_DUPLICATES ${_var})
	set (${_var} "${${_var}}" PARENT_SCOPE)
endfunction()

# public function for adding a CMake test that runs a Mathematica MUnit test
# file or notebook; the MUnit TestRun result becomes the CTest exit status.
function (Mathematica_MUnit_ADD_TEST)
	set(_options "")
	set(_oneValueArgs NAME LOGGERS SCRIPT INPUT INPUT_FILE TIMEOUT SYSTEM_ID)
	set(_multiValueArgs CODE CONFIGURATIONS KERNEL_OPTIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if(_option_UNPARSED_ARGUMENTS)
		message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}")
	elseif (NOT _option_NAME)
		message (FATAL_ERROR "Mandatory parameter NAME is missing.")
	elseif (NOT _option_SCRIPT)
		message (FATAL_ERROR "Mandatory parameter SCRIPT is missing.")
	endif()
	set (_cmd NAME "${_option_NAME}" COMMAND)
	_add_test_driver(_cmd "${_option_NAME}" _option_INPUT _option_INPUT_FILE)
	if (NOT _option_LOGGERS)
		# default to VerbosePrintLogger which prints detailed information for failed tests
		set (_option_LOGGERS "{VerbosePrintLogger[]}")
	endif()
	set (_testCmds "If[Needs[\"MUnit`\"]===$Failed,Exit[]]")
	if (_option_CODE)
		list (APPEND _testCmds ${_option_CODE})
	endif()
	if (IS_ABSOLUTE "${_option_SCRIPT}")
		_to_cmake_path("${_option_SCRIPT}" _testScript)
	else()
		_to_cmake_path("${CMAKE_CURRENT_SOURCE_DIR}/${_option_SCRIPT}" _testScript)
	endif()
	get_filename_component(_testScriptExt "${_testScript}" EXT)
	get_filename_component(_testScriptDir "${_testScript}" DIRECTORY)
	Mathematica_TO_NATIVE_STRING("${_option_NAME}" _testNameMma)
	if ("${_testScriptExt}" MATCHES "\\.(nb|cdf)$")
		# notebook test run requires Mathematica front end
		if (DEFINED Mathematica_VERSION)
			if ("${Mathematica_VERSION}" VERSION_LESS "7.0")
				# default to using undocumented function Developer`UseFrontEnd
				# available in Mathematica 5.1 and newer
				set (_useFrontEndFunc "Developer`UseFrontEnd")
			else()
				# documented function UsingFrontEnd available since Mathematica 7
				set (_useFrontEndFunc "UsingFrontEnd")
			endif()
		endif()
		Mathematica_TO_NATIVE_PATH("${_testScript}" _testScriptMma)
		string (REGEX REPLACE "\n|\t" "" _testCmd "${_useFrontEndFunc}[ CompoundExpression[ nb=NotebookOpen[${_testScriptMma},Visible->False], mUnitResult=TestRun[nb,TestRunTitle->${_testNameMma},Loggers:>${_option_LOGGERS}], NotebookClose[nb]]]")
		list (APPEND _testCmds "${_testCmd}")
	else()
		Mathematica_MUnit_RESOLVE_SUITE(_testFiles "${_testScript}")
		Mathematica_TO_NATIVE_PATH("${_testFiles}" _testFilesMma)
		list (LENGTH _testFiles _fileCount)
		if (_fileCount GREATER 1)
			if (DEFINED Mathematica_VERSION)
				if ("${Mathematica_VERSION}" VERSION_LESS "7.0")
					# default to using DirectoryName
					set (_titleExtractFunc "StringDrop[#,StringLength[DirectoryName[#]]]")
				else()
					# function FileNameTake available since Mathematica 7
					set (_titleExtractFunc "FileNameTake[#]")
				endif()
			endif()
			# NOTE(review): _testScriptDirMma is computed here but not referenced
			# afterwards — possibly vestigial; confirm before removing.
			Mathematica_TO_NATIVE_PATH("${_testScriptDir}" _testScriptDirMma)
			string (REGEX REPLACE "\n|\t" "" _testCmd "mUnitResult=And@@Map[ TestRun[#,TestRunTitle->${_titleExtractFunc},Loggers:>${_option_LOGGERS}]&, ${_testFilesMma}]")
		else()
			string (REGEX REPLACE "\n|\t" "" _testCmd "mUnitResult=TestRun[ ${_testFilesMma},TestRunTitle->${_testNameMma},Loggers:>${_option_LOGGERS}]")
		endif()
		list (APPEND _testCmds "${_testCmd}")
	endif()
	# use MUnit TestRun result as exit code to signal CTest success or failure
	list (APPEND _testCmds "Exit[Boole[Not[mUnitResult]]]")
	_add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS)
	_add_script_or_code(_cmd _noScript _testCmds)
	if (_option_CONFIGURATIONS)
		list (APPEND _cmd CONFIGURATIONS ${_option_CONFIGURATIONS})
	endif()
	if (Mathematica_DEBUG)
		message (STATUS "add_test: ${_cmd}")
	endif()
	add_test (${_cmd})
	set_property (TEST ${_option_NAME} PROPERTY LABELS "Mathematica")
	if (_option_TIMEOUT)
		set_tests_properties (${_option_NAME} PROPERTIES TIMEOUT ${_option_TIMEOUT})
	endif()
endfunction (Mathematica_MUnit_ADD_TEST)

endif (Mathematica_MUnit_FOUND)

if (Mathematica_KERNEL_EXECUTABLE AND Mathematica_JLink_FOUND)

# public function for adding a target which builds Mathematica documentation
function (Mathematica_ADD_DOCUMENTATION _targetName)
	# documentation build requires Apache Ant
	if (CMAKE_HOST_WIN32)
		set (_antExecutableName "ant.bat")
	else()
		set (_antExecutableName "ant")
	endif()
	find_program(Mathematica_ANT_EXECUTABLE "${_antExecutableName}" PATHS ENV ANT_HOME PATH_SUFFIXES "bin")
	if (NOT Mathematica_ANT_EXECUTABLE)
		message (WARNING "Mathematica documentation build required Apache Ant executable \"ant\" cannot be found.")
	endif()
	# find DocumentationBuild package
	Mathematica_FIND_PACKAGE(Mathematica_DocumentationBuild_PACKAGE_FILE "DocumentationBuild`" DOC "Mathematica DocumentationBuild package.")
	if (NOT Mathematica_DocumentationBuild_PACKAGE_FILE)
		message (STATUS "Mathematica documentation build required package \"DocumentationBuild`\" cannot be found.")
	endif()
	Mathematica_GET_PACKAGE_DIR(Mathematica_DocumentationBuild_PACKAGE_DIR
"${Mathematica_DocumentationBuild_PACKAGE_FILE}") # find Transmogrify package required by DocumentationBuild package Mathematica_FIND_PACKAGE(Mathematica_Transmogrify_PACKAGE_FILE "Transmogrify`" DOC "Mathematica Transmogrify package.") if (NOT Mathematica_Transmogrify_PACKAGE_FILE) message (STATUS "Mathematica documentation build required package \"Transmogrify`\" cannot be found.") endif() mark_as_advanced( Mathematica_ANT_EXECUTABLE Mathematica_DocumentationBuild_PACKAGE_FILE Mathematica_Transmogrify_PACKAGE_FILE ) # build command from options set(_options "ALL" "CHECK_TIMESTAMPS" "INCLUDE_NOTEBOOKS") set(_oneValueArgs DOCUMENTATION_TYPE INPUT_DIRECTORY OUTPUT_DIRECTORY APPLICATION_NAME LANGUAGE COMMENT JAVACMD) set(_multiValueArgs SOURCES) cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN}) if(_option_UNPARSED_ARGUMENTS) message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}") endif() if (NOT _option_DOCUMENTATION_TYPE) set (_option_DOCUMENTATION_TYPE "Notebook") endif() if (NOT _option_APPLICATION_NAME) set (_option_APPLICATION_NAME "${PROJECT_NAME}") endif() if (NOT _option_LANGUAGE) set (_option_LANGUAGE "English") endif() if (NOT _option_INPUT_DIRECTORY) set (_option_INPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}") endif() if (NOT _option_JAVACMD) if (Mathematica_JLink_JAVA_EXECUTABLE) set (_option_JAVACMD "${Mathematica_JLink_JAVA_EXECUTABLE}") elseif (Java_JAVA_EXECUTABLE) set (_option_JAVACMD "${Java_JAVA_EXECUTABLE}") else() if (CMAKE_HOST_WIN32) set (_option_JAVACMD "java.exe") else() set (_option_JAVACMD "java") endif() endif() endif() if (NOT _option_OUTPUT_DIRECTORY) if (_option_DOCUMENTATION_TYPE STREQUAL "Notebook") set (_option_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${_option_APPLICATION_NAME}/Documentation") else() set (_option_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${_option_APPLICATION_NAME}-${_option_DOCUMENTATION_TYPE}") endif() endif() if (NOT _option_COMMENT) set 
(_option_COMMENT "Building ${_option_APPLICATION_NAME} Mathematica ${_option_DOCUMENTATION_TYPE} documentation") endif() # set up custom target set (_cmd "${_targetName}") if (_option_ALL) list (APPEND _cmd ALL) endif() if (Mathematica_ANT_EXECUTABLE AND Mathematica_DocumentationBuild_PACKAGE_FILE AND Mathematica_Transmogrify_PACKAGE_FILE) # set up documentation generation script if all requirements are met set (_templateBuildScript "${Mathematica_CMAKE_MODULE_DIR}/FindMathematicaDocumentationBuild.cmake.in") set (_buildScriptName "${_option_APPLICATION_NAME}Mathematica${_option_DOCUMENTATION_TYPE}DocumentationBuild.cmake") if (NOT EXISTS "${_templateBuildScript}") message (FATAL_ERROR "FindMathematica documentation build script template ${_templateBuildScript} is missing.") endif() configure_file("${_templateBuildScript}" "${_buildScriptName}" @ONLY) list (APPEND _cmd COMMAND "${CMAKE_COMMAND}" "-P" "${CMAKE_CURRENT_BINARY_DIR}/${_buildScriptName}") list (APPEND _cmd DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/${_buildScriptName}") else() # just generate empty documentation directory and print message list (APPEND _cmd COMMAND "${CMAKE_COMMAND}" "-E" "make_directory" "${_option_OUTPUT_DIRECTORY}") list (APPEND _cmd COMMAND "${CMAKE_COMMAND}" "-E" "echo" "Required Mathematica packages for documentation building are not available.") endif() list (APPEND _cmd WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}") list (APPEND _cmd COMMENT ${_option_COMMENT} VERBATIM) if (_option_SOURCES) list (APPEND _cmd SOURCES ${_option_SOURCES}) endif() if (_option_INCLUDE_NOTEBOOKS) file (GLOB_RECURSE _docuSourceNBs "${_option_INPUT_DIRECTORY}/*.nb") if (_docuSourceNBs) if (_option_SOURCES) list (APPEND _cmd ${_docuSourceNBs}) else() list (APPEND _cmd SOURCES ${_docuSourceNBs}) endif() endif() endif() if (Mathematica_DEBUG) message (STATUS "add_custom_target: ${_cmd}") endif() add_custom_target(${_cmd}) endfunction (Mathematica_ADD_DOCUMENTATION) endif (Mathematica_KERNEL_EXECUTABLE AND 
Mathematica_JLink_FOUND) if (Mathematica_KERNEL_EXECUTABLE AND Mathematica_JLink_FOUND) # public function to simplify testing J/Link programs function (Mathematica_JLink_ADD_TEST) set(_options "") set(_oneValueArgs NAME MAIN_CLASS SCRIPT TARGET INPUT INPUT_FILE SYSTEM_ID LINK_PROTOCOL) set(_multiValueArgs CODE CONFIGURATIONS KERNEL_OPTIONS CLASSPATH) cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN}) if(_option_UNPARSED_ARGUMENTS) message (FATAL_ERROR "Unknown keywords: ${_option_UNPARSED_ARGUMENTS}") elseif (NOT _option_TARGET) message (FATAL_ERROR "Mandatory parameter TARGET is missing.") elseif (NOT _option_NAME) message (FATAL_ERROR "Mandatory parameter NAME is missing.") endif() set (_cmd NAME "${_option_NAME}" COMMAND) _add_test_driver(_cmd "${_option_NAME}" _option_INPUT _option_INPUT_FILE) if (TARGET ${_option_TARGET}) get_target_property (_targetJarFile ${_option_TARGET} JAR_FILE) else() _to_cmake_path("${_option_TARGET}" _targetJarFile) endif() if (_option_CODE OR _option_SCRIPT) # run Mathematica kernel and load JAR file Mathematica_TO_NATIVE_PATH("${_targetJarFile}" _targetJarFileMma) set (_installCmd "Needs[\"JLink`\"]" "AddToClassPath[${_targetJarFileMma}]") if (_option_CODE) list (INSERT _option_CODE 0 ${_installCmd}) else() set (_option_CODE ${_installCmd}) endif() _add_kernel_launch_code(_cmd _option_SYSTEM_ID _option_KERNEL_OPTIONS) _add_script_or_code(_cmd _option_SCRIPT _option_CODE) else() # run JAR file as front-end to Mathematica kernel if (NOT _option_MAIN_CLASS) get_filename_component(_option_MAIN_CLASS ${_targetJarFile} NAME_WE) endif() _to_native_path ("${Mathematica_JLink_JAR_FILE}" _jlinkJarNative) _to_native_path ("${_targetJarFile}" _targetJarFileNative) _to_native_path_list(_classPath "${_jlinkJarNative}" "${_targetJarFileNative}" ${_option_CLASSPATH}) if (Mathematica_JLink_JAVA_EXECUTABLE) list (APPEND _cmd "${Mathematica_JLink_JAVA_EXECUTABLE}") elseif (Java_JAVA_EXECUTABLE) list (APPEND 
_cmd "${Java_JAVA_EXECUTABLE}") else() if (CMAKE_HOST_WIN32) list (APPEND _cmd "java.exe") else() list (APPEND _cmd "java") endif() endif() if (Mathematica_JLink_RUNTIME_LIBRARY) get_filename_component(_jlinkLibraryDir ${Mathematica_JLink_RUNTIME_LIBRARY} DIRECTORY) _to_native_path ("${_jlinkLibraryDir}" _jlinkLibraryDirNative) list (APPEND _cmd "-Dcom.wolfram.jlink.libdir=${_jlinkLibraryDirNative}") endif() list (APPEND _cmd "-cp" "${_classPath}" "${_option_MAIN_CLASS}") _add_linkmode_launch_code(_cmd "-mathlink" _option_SYSTEM_ID _option_KERNEL_OPTIONS _option_LINK_PROTOCOL _option_SCRIPT _option_CODE) endif() if (_option_CONFIGURATIONS) list (APPEND _cmd CONFIGURATIONS ${_option_CONFIGURATIONS}) endif() if (Mathematica_DEBUG) message (STATUS "add_test: ${_cmd}") endif() add_test (${_cmd}) endfunction(Mathematica_JLink_ADD_TEST) endif(Mathematica_KERNEL_EXECUTABLE AND Mathematica_JLink_FOUND) ================================================ FILE: cmake/modules/FindMathematicaDocumentationBuild.cmake.in ================================================ # FindMathematica @Mathematica_CMAKE_MODULE_VERSION@ documentation build script # JAVACMD is an environment variable that points to the full path to the Java runtime executable # used by the Apache Ant wrapper scripts set (ENV{JAVACMD} "@_option_JAVACMD@") set (Mathematica_KERNEL_EXECUTABLE "@Mathematica_KERNEL_EXECUTABLE@") set (Mathematica_ANT_EXECUTABLE "@Mathematica_ANT_EXECUTABLE@") set (Mathematica_JLink_PACKAGE_DIR "@Mathematica_JLink_PACKAGE_DIR@") set (Mathematica_DocumentationBuild_PACKAGE_DIR "@Mathematica_DocumentationBuild_PACKAGE_DIR@") set (Mathematica_DEBUG "@Mathematica_DEBUG@") set (DOCU_INPUT_DIRECTORY "@_option_INPUT_DIRECTORY@") set (DOCU_OUTPUT_DIRECTORY "@_option_OUTPUT_DIRECTORY@") set (DOCU_TYPE "@_option_DOCUMENTATION_TYPE@") set (DOCU_LANGUAGE "@_option_LANGUAGE@") set (DOCU_APP_NAME "@_option_APPLICATION_NAME@") set (CHECK_TIMESTAMPS "@_option_CHECK_TIMESTAMPS@") function(_to_native_path 
_inPath _outPathVariable) if (CYGWIN) execute_process( COMMAND cygpath "--mixed" "${_inPath}" TIMEOUT 5 OUTPUT_VARIABLE ${_outPathVariable} OUTPUT_STRIP_TRAILING_WHITESPACE) elseif (CMAKE_HOST_WIN32) string (REPLACE "/" "\\" ${_outPathVariable} "${_inPath}") else() # use CMake path literally set (${_outPathVariable} "${_inPath}") endif() set (${_outPathVariable} "${${_outPathVariable}}" PARENT_SCOPE) endfunction() set (_buildDocu TRUE) # handle CHECK_TIMESTAMPS option if (CHECK_TIMESTAMPS AND EXISTS "${DOCU_OUTPUT_DIRECTORY}") if (DOCU_TYPE MATCHES "[Nn]otebook") file (GLOB_RECURSE _docuSourceNBs RELATIVE "${DOCU_INPUT_DIRECTORY}" "${DOCU_INPUT_DIRECTORY}/*.nb") file (GLOB_RECURSE _docuBinaryNBs RELATIVE "${DOCU_OUTPUT_DIRECTORY}" "${DOCU_OUTPUT_DIRECTORY}/*.nb") if (_docuBinaryNBs) set (_docuNBs ${_docuSourceNBs} ${_docuBinaryNBs}) list (REMOVE_DUPLICATES _docuNBs) set (_docuNBTrigger "") foreach (_docuNB ${_docuNBs}) if ("${DOCU_INPUT_DIRECTORY}/${_docuNB}" IS_NEWER_THAN "${DOCU_OUTPUT_DIRECTORY}/${_docuNB}") list (APPEND _docuNBTrigger "${_docuNB}") list (LENGTH _docuNBTrigger _len) if (_len GREATER 10) # stop if many out-of-date files have been found break() endif() endif() endforeach() if (_docuNBTrigger) message (STATUS "Out-of-date ${DOCU_APP_NAME} Mathematica documentation notebooks: ${_docuNBTrigger}") else() message (STATUS "Built ${DOCU_APP_NAME} Mathematica ${DOCU_TYPE} documentation is up-to-date") set (_buildDocu FALSE) endif() endif() elseif (DOCU_TYPE MATCHES "HTML") file (GLOB_RECURSE _docuSourceNBs RELATIVE "${DOCU_INPUT_DIRECTORY}" "${DOCU_INPUT_DIRECTORY}/*.nb") file (GLOB_RECURSE _docuBinaryHTMLs RELATIVE "${DOCU_OUTPUT_DIRECTORY}" "${DOCU_OUTPUT_DIRECTORY}/*.html") if (_docuBinaryHTMLs) set (_docuNBTrigger "") foreach (_docuNB ${_docuSourceNBs}) get_filename_component(_docuBaseName "${_docuNB}" NAME_WE) string (REGEX MATCHALL "[^;]+/${_docuBaseName}\\.html" _docuHTMLNames "${_docuBinaryHTMLs}") if (_docuHTMLNames) foreach (_docuHTMLName 
${_docuHTMLNames}) if ("${DOCU_INPUT_DIRECTORY}/${_docuNB}" IS_NEWER_THAN "${DOCU_OUTPUT_DIRECTORY}/${_docuHTMLName}") list (APPEND _docuNBTrigger "${_docuNB}") break() endif() endforeach() else() # no corresponding HTML document list (APPEND _docuNBTrigger "${_docuNB}") endif() list (LENGTH _docuNBTrigger _len) if (_len GREATER 10) # stop if many out-of-date files have been found break() endif() endforeach() if (_docuNBTrigger) message (STATUS "Out-of-date ${DOCU_APP_NAME} Mathematica documentation notebooks: ${_docuNBTrigger}") else() message (STATUS "Built ${DOCU_APP_NAME} Mathematica ${DOCU_TYPE} documentation is up-to-date") set (_buildDocu FALSE) endif() endif() endif() endif() if (_buildDocu) # clean previously built documentation files file (REMOVE_RECURSE "${DOCU_OUTPUT_DIRECTORY}") file (MAKE_DIRECTORY "${DOCU_OUTPUT_DIRECTORY}") get_filename_component(_appPath "${Mathematica_DocumentationBuild_PACKAGE_DIR}" DIRECTORY) string (TOLOWER "${DOCU_TYPE}.xml" _buildFileName) set (_buildFile "${Mathematica_DocumentationBuild_PACKAGE_DIR}/SystemFiles/ant/Build/${_buildFileName}") _to_native_path ("${Mathematica_ANT_EXECUTABLE}" _antExecutableNative) _to_native_path ("${_buildFile}" _buildFileNative) _to_native_path ("${_appPath}" _appPathNative) _to_native_path ("${Mathematica_KERNEL_EXECUTABLE}" _kernelExecutableNative) _to_native_path ("${Mathematica_JLink_PACKAGE_DIR}" _jlinkPathNative) _to_native_path ("${DOCU_INPUT_DIRECTORY}" _inputDirNative) _to_native_path ("${DOCU_OUTPUT_DIRECTORY}" _outputDirNative) set (_cmd COMMAND "${_antExecutableNative}") list (APPEND _cmd "-buildfile" "${_buildFileNative}") list (APPEND _cmd "-DappPath=${_appPathNative}") list (APPEND _cmd "-Dapp.name=${DOCU_APP_NAME}") list (APPEND _cmd "-DmathExe=${_kernelExecutableNative}") list (APPEND _cmd "-Djlinkpath=${_jlinkPathNative}") list (APPEND _cmd "-DinputDir=${_inputDirNative}") list (APPEND _cmd "-DoutputDir=${_outputDirNative}") list (APPEND _cmd "-Dlanguage=${DOCU_LANGUAGE}") 
list (APPEND _cmd "-DincludeLinkTrails=False") list (APPEND _cmd "-Dlocal=True") list (APPEND _cmd "-DcompleteHTMLQ=True") if (Mathematica_DEBUG) list (APPEND _cmd "-Ddebug=True") else() list (APPEND _cmd "-Ddebug=False") endif() list (APPEND _cmd RESULT_VARIABLE _result) if (Mathematica_DEBUG) message (STATUS "execute_process: ${_cmd}") endif() execute_process(${_cmd}) message(STATUS "${DOCU_APP_NAME} ${DOCU_TYPE} generation result=${_result}") endif() ================================================ FILE: cmake/modules/FindMathematicaTestDriver.cmd ================================================ @echo off rem FindMathematica test driver script for Windows setlocal enabledelayedexpansion rem echo !CMDCMDLINE! rem echo !PATH! set "TEST_NAME=%~1" set "TEST_CONFIGURATION=%~2" set "TEST_INPUT_OPTION=%~3" if "!TEST_INPUT_OPTION!" == "input" ( set "TEST_INPUT=%~4" set "TEST_EXECUTABLE=%~5" shift shift shift shift shift shift ) else if "!TEST_INPUT_OPTION!" == "inputfile" ( set "TEST_INPUT_FILE=%~4" set "TEST_EXECUTABLE=%~5" shift shift shift shift shift shift ) else ( set "TEST_EXECUTABLE=%~4" shift shift shift shift shift ) if "!TEST_INPUT_OPTION!" == "input" ( echo !TEST_INPUT! | "!TEST_EXECUTABLE!" %0 %1 %2 %3 %4 %5 %6 %7 %8 %9 ) else if "!TEST_INPUT_OPTION!" == "inputfile" ( "!TEST_EXECUTABLE!" < "!TEST_INPUT_FILE!" %0 %1 %2 %3 %4 %5 %6 %7 %8 %9 ) else ( "!TEST_EXECUTABLE!" 
%0 %1 %2 %3 %4 %5 %6 %7 %8 %9 )

================================================
FILE: cmake/modules/FindMathematicaTestDriver.sh
================================================
#!/bin/bash
# FindMathematica test driver script for UNIX systems
# Invoked as: <driver> <test name> <configuration> <input option> [input|inputfile] <executable> [args...]
#logger -- $# "$@"
#logger -- LD_LIBRARY_PATH=$LD_LIBRARY_PATH
#logger -- DYLD_FRAMEWORK_PATH=$DYLD_FRAMEWORK_PATH
#logger -- DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
export TEST_NAME=$1
export TEST_CONFIGURATION=$2
export TEST_INPUT_OPTION=$3
# Positional layout depends on the input option: with "input" or "inputfile"
# the 4th argument carries the stdin payload (literal text or file path) and
# the 5th the executable; otherwise the executable is the 4th argument.
if [ "$TEST_INPUT_OPTION" = "input" ]
then
	export TEST_INPUT=$4
	export TEST_EXECUTABLE=$5
elif [ "$TEST_INPUT_OPTION" = "inputfile" ]
then
	export TEST_INPUT_FILE=$4
	export TEST_EXECUTABLE=$5
else
	export TEST_EXECUTABLE=$4
fi
if [ "$OSTYPE" = "cygwin" ]
then
	# make sure that executable has the right format under Cygwin
	export TEST_EXECUTABLE="$(/usr/bin/cygpath --unix "$TEST_EXECUTABLE")"
fi
# Run the test executable, wiring up the requested stdin; all remaining
# command-line arguments are forwarded to it unchanged.
if [ "$TEST_INPUT_OPTION" = "input" ]
then
	echo "$TEST_INPUT" | exec "$TEST_EXECUTABLE" "${@:6}"
elif [ "$TEST_INPUT_OPTION" = "inputfile" ]
then
	exec < "$TEST_INPUT_FILE" "$TEST_EXECUTABLE" "${@:6}"
else
	exec "$TEST_EXECUTABLE" "${@:5}"
fi

================================================
FILE: cmake/modules/FindPythonLibsOSX.cmake
================================================
# - Find python libraries
# This module finds if Python is installed and determines where the
# include files and libraries are. It also determines what the name of
# the library is.
# This code sets the following variables:
#
#  PYTHONLIBS_FOUND           - have the Python libs been found
#  PYTHON_LIBRARIES           - path to the python library
#  PYTHON_INCLUDE_PATH        - path to where Python.h is found (deprecated)
#  PYTHON_INCLUDE_DIRS        - path to where Python.h is found
#  PYTHON_DEBUG_LIBRARIES     - path to the debug library (deprecated)
#  PYTHONLIBS_VERSION_STRING  - version of the Python libs found (since CMake 2.8.8)
#
# The Python_ADDITIONAL_VERSIONS variable can be used to specify a list of
# version numbers that should be taken into account when searching for Python.
# You need to set this variable before calling find_package(PythonLibs).
#
# If you'd like to specify the installation of Python to use, you should modify
# the following cache variables:
#  PYTHON_LIBRARY             - path to the python library
#  PYTHON_INCLUDE_DIR         - path to where Python.h is found

#=============================================================================
# Copyright 2001-2009 Kitware, Inc.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
#  License text for the above reference.)

# Note by Nikolaus Demmel 28.03.2014: My contributions are licensed under the
# same license as CMake (BSD). My adaptations are in part based on
# https://github.com/qgis/QGIS/tree/master/cmake which has the following
# copyright note:
# Copyright (c) 2007, Simon Edwards
# Redistribution and use is allowed according to the terms of the BSD license.
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.

# Seed PYTHON_INCLUDE_DIR from the deprecated PYTHON_INCLUDE_PATH when the
# caller only set the old variable name.
if(NOT DEFINED PYTHON_INCLUDE_DIR)
  if(DEFINED PYTHON_INCLUDE_PATH)
    # For backward compatibility, respect PYTHON_INCLUDE_PATH.
set(PYTHON_INCLUDE_DIR "${PYTHON_INCLUDE_PATH}" CACHE PATH "Path to where Python.h is found" FORCE) else() set(PYTHON_INCLUDE_DIR "" CACHE PATH "Path to where Python.h is found" FORCE) endif() endif() if(EXISTS "${PYTHON_INCLUDE_DIR}" AND EXISTS "${PYTHON_LIBRARY}") if(EXISTS "${PYTHON_INCLUDE_DIR}/patchlevel.h") file(STRINGS "${PYTHON_INCLUDE_DIR}/patchlevel.h" _PYTHON_VERSION_STR REGEX "^#define[ \t]+PY_VERSION[ \t]+\"[^\"]+\"") string(REGEX REPLACE "^#define[ \t]+PY_VERSION[ \t]+\"([^\"]+)\".*" "\\1" PYTHONLIBS_VERSION_STRING "${_PYTHON_VERSION_STR}") unset(_PYTHON_VERSION_STR) endif() else() set(_PYTHON1_VERSIONS 1.6 1.5) set(_PYTHON2_VERSIONS 2.7 2.6 2.5 2.4 2.3 2.2 2.1 2.0) set(_PYTHON3_VERSIONS 3.4 3.3 3.2 3.1 3.0) unset(_PYTHON_FIND_OTHER_VERSIONS) if(PythonLibs_FIND_VERSION) if(PythonLibs_FIND_VERSION_COUNT GREATER 1) set(_PYTHON_FIND_MAJ_MIN "${PythonLibs_FIND_VERSION_MAJOR}.${PythonLibs_FIND_VERSION_MINOR}") if(NOT PythonLibs_FIND_VERSION_EXACT) foreach(_PYTHON_V ${_PYTHON${PythonLibs_FIND_VERSION_MAJOR}_VERSIONS}) if(NOT _PYTHON_V VERSION_LESS _PYTHON_FIND_MAJ_MIN) if(NOT _PYTHON_V STREQUAL PythonLibs_FIND_VERSION) list(APPEND _PYTHON_FIND_OTHER_VERSIONS ${_PYTHON_V}) endif() endif() endforeach() endif() unset(_PYTHON_FIND_MAJ_MIN) else() set(_PYTHON_FIND_OTHER_VERSIONS ${_PYTHON${PythonLibs_FIND_VERSION_MAJOR}_VERSIONS}) endif() else() # add an empty version to check the `python` executable first in case no version is requested set(_PYTHON_FIND_OTHER_VERSIONS ${_PYTHON3_VERSIONS} ${_PYTHON2_VERSIONS} ${_PYTHON1_VERSIONS}) endif() unset(_PYTHON1_VERSIONS) unset(_PYTHON2_VERSIONS) unset(_PYTHON3_VERSIONS) # Set up the versions we know about, in the order we will search. Always add # the user supplied additional versions to the front. # If FindPythonInterp has already found the major and minor version, # insert that version between the user supplied versions and the stock # version list. 
# If no specific version is requested or suggested by PythonInterp, always look # for "python" executable first set(_PYTHON_VERSIONS ${PythonLibs_FIND_VERSION} ${PythonLibs_ADDITIONAL_VERSIONS} ) if(DEFINED PYTHON_VERSION_MAJOR AND DEFINED PYTHON_VERSION_MINOR) list(APPEND _PYTHON_VERSIONS ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}) endif() if (NOT _PYTHON_VERSIONS) set(_PYTHON_VERSIONS ";") # empty entry at the front makeing sure we search for "python" first endif() list(APPEND _PYTHON_VERSIONS ${_PYTHON_FIND_OTHER_VERSIONS}) unset(_PYTHON_FIND_OTHER_VERSIONS) message(STATUS "Looking for versions: ${_PYTHON_VERSIONS}") FIND_FILE(_FIND_LIB_PYTHON_PY FindLibPythonOSX.py PATHS ${CMAKE_MODULE_PATH} ${CMAKE_ROOT}/Modules) if(NOT _FIND_LIB_PYTHON_PY) message(FATAL_ERROR "Could not find required file 'FindLibPythonOSX.py'") endif() unset(PYTHONLIBS_VERSION_STRING) foreach(_CURRENT_VERSION IN LISTS _PYTHON_VERSIONS) STRING(REGEX REPLACE "^([0-9]+).*$" "\\1" _VERSION_MAJOR "${_CURRENT_VERSION}") STRING(REGEX REPLACE "^[0-9]+\\.([0-9]+).*$" "\\1" _VERSION_MINOR "${_CURRENT_VERSION}") set(_PYTHON_NAMES python) if (_CURRENT_VERSION MATCHES "^[0-9]+.*$") list(APPEND _PYTHON_NAMES "python${_VERSION_MAJOR}") if (_CURRENT_VERSION MATCHES "^[0-9]+\\.[0-9].*$") list(APPEND _PYTHON_NAMES "python${_VERSION_MAJOR}.${_VERSION_MINOR}") endif() endif() message(STATUS "Looking for python version '${_CURRENT_VERSION}' by checking executables: ${_PYTHON_NAMES}.") foreach(_CURRENT_PYTHON_NAME IN LISTS _PYTHON_NAMES) unset(_PYTHON_EXECUTABLE CACHE) find_program(_PYTHON_EXECUTABLE ${_CURRENT_PYTHON_NAME} PATHS [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]) if(_PYTHON_EXECUTABLE) EXECUTE_PROCESS( COMMAND ${_PYTHON_EXECUTABLE} "${_FIND_LIB_PYTHON_PY}" OUTPUT_VARIABLE _PYTHON_CONFIG RESULT_VARIABLE _PYTHON_CONFIG_RESULT ERROR_QUIET) if(NOT ${_PYTHON_CONFIG_RESULT} AND (NOT ${_PYTHON_CONFIG} STREQUAL "")) STRING(REGEX REPLACE 
".*\nmajor_version:([0-9]+).*$" "\\1" _PYTHON_MAJOR_VERSION ${_PYTHON_CONFIG})
# Extract the remaining "key:value" fields reported by FindLibPythonOSX.py,
# one field per line of its output.
STRING(REGEX REPLACE ".*\nminor_version:([0-9]+).*$" "\\1" _PYTHON_MINOR_VERSION ${_PYTHON_CONFIG})
STRING(REGEX REPLACE ".*\npatch_version:([0-9]+).*$" "\\1" _PYTHON_PATCH_VERSION ${_PYTHON_CONFIG})
STRING(REGEX REPLACE ".*\nshort_version:([^\n]+).*$" "\\1" _PYTHON_SHORT_VERSION ${_PYTHON_CONFIG})
STRING(REGEX REPLACE ".*\nlong_version:([^\n]+).*$" "\\1" _PYTHON_LONG_VERSION ${_PYTHON_CONFIG})
STRING(REGEX REPLACE ".*\npy_inc_dir:([^\n]+).*$" "\\1" _PYTHON_INCLUDE_DIR ${_PYTHON_CONFIG})
STRING(REGEX REPLACE ".*\npy_lib_dir:([^\n]+).*$" "\\1" _PYTHON_LIBRARY_DIR ${_PYTHON_CONFIG})
# FIX: the capture group read "(^\n+)", which matches a literal "^" followed
# by newlines and can never capture the prefix path; use "([^\n]+)" like the
# sibling fields above.
STRING(REGEX REPLACE ".*\nexec_prefix:([^\n]+).*$" "\\1" _PYTHON_PREFIX ${_PYTHON_CONFIG})
# Accept this interpreter when no particular version was requested, or when
# any of its version spellings (major, major.minor, full) matches the request.
if ("${_CURRENT_VERSION}" STREQUAL "" OR
    "${_CURRENT_VERSION}" STREQUAL "${_PYTHON_MAJOR_VERSION}" OR
    "${_CURRENT_VERSION}" STREQUAL "${_PYTHON_SHORT_VERSION}" OR
    "${_CURRENT_VERSION}" STREQUAL "${_PYTHON_LONG_VERSION}")
  message(STATUS "Found executable ${_PYTHON_EXECUTABLE} with suitable version ${_PYTHON_LONG_VERSION}")
  # Only fill in the include dir / library when the user has not already
  # provided valid paths in the cache.
  if(NOT EXISTS "${PYTHON_INCLUDE_DIR}")
    set(PYTHON_INCLUDE_DIR "${_PYTHON_INCLUDE_DIR}")
  endif()
  if(NOT EXISTS "${PYTHON_LIBRARY}")
    set(_PYTHON_SHORT_VERSION_NO_DOT "${_PYTHON_MAJOR_VERSION}${_PYTHON_MINOR_VERSION}")
    set(_PYTHON_LIBRARY_NAMES python${_PYTHON_SHORT_VERSION} python${_PYTHON_SHORT_VERSION_NO_DOT})
    # FIX: "$ {_PYTHON_PREFIX}/libs" contained a stray space that broke the
    # variable reference; it must read "${_PYTHON_PREFIX}/libs".
    FIND_LIBRARY(PYTHON_LIBRARY
      NAMES ${_PYTHON_LIBRARY_NAMES}
      PATH_SUFFIXES
        python${_PYTHON_SHORT_VERSION}/config
        python${_PYTHON_SHORT_VERSION_NO_DOT}/config
      PATHS
        ${_PYTHON_LIBRARY_DIR}
        ${_PYTHON_PREFIX}/lib
        ${_PYTHON_PREFIX}/libs
      NO_DEFAULT_PATH)
    if(WIN32)
      # On Windows the debug runtime is located via the Python registry keys.
      find_library(PYTHON_DEBUG_LIBRARY
        NAMES python${_PYTHON_SHORT_VERSION_NO_DOT}_d python
        PATHS
          [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs/Debug
          [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs/Debug
[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs ) endif() endif() set(PYTHONLIBS_VERSION_STRING ${_PYTHON_LONG_VERSION}) if(_PYTHON_PATCH_VERSION STREQUAL "0") # it's called "Python 2.7", not "2.7.0" string(REGEX REPLACE "\\.0$" "" PYTHONLIBS_VERSION_STRING "${PYTHONLIBS_VERSION_STRING}") endif() break() else() message(STATUS "Found executable ${_PYTHON_EXECUTABLE} with UNsuitable version ${_PYTHON_LONG_VERSION}") endif() # version ok else() message(WARNING "Found executable ${_PYTHON_EXECUTABLE}, but could not extract version info.") endif() # could extract config endif() # found executable endforeach() # python names if (PYTHONLIBS_VERSION_STRING) break() endif() endforeach() # python versions endif() unset(_PYTHON_NAMES) unset(_PYTHON_VERSIONS) unset(_PYTHON_EXECUTABLE CACHE) unset(_PYTHON_MAJOR_VERSION) unset(_PYTHON_MINOR_VERSION) unset(_PYTHON_PATCH_VERSION) unset(_PYTHON_SHORT_VERSION) unset(_PYTHON_LONG_VERSION) unset(_PYTHON_LIBRARY_DIR) unset(_PYTHON_INCLUDE_DIR) unset(_PYTHON_PREFIX) unset(_PYTHON_SHORT_VERSION_NO_DOT) unset(_PYTHON_LIBRARY_NAMES) # For backward compatibility, set PYTHON_INCLUDE_PATH. set(PYTHON_INCLUDE_PATH "${PYTHON_INCLUDE_DIR}") mark_as_advanced( PYTHON_DEBUG_LIBRARY PYTHON_LIBRARY PYTHON_INCLUDE_DIR ) # We use PYTHON_INCLUDE_DIR, PYTHON_LIBRARY and PYTHON_DEBUG_LIBRARY for the # cache entries because they are meant to specify the location of a single # library. We now set the variables listed by the documentation for this # module. set(PYTHON_INCLUDE_DIRS "${PYTHON_INCLUDE_DIR}") set(PYTHON_DEBUG_LIBRARIES "${PYTHON_DEBUG_LIBRARY}") # These variables have been historically named in this module different from # what SELECT_LIBRARY_CONFIGURATIONS() expects. 
set(PYTHON_LIBRARY_DEBUG "${PYTHON_DEBUG_LIBRARY}") set(PYTHON_LIBRARY_RELEASE "${PYTHON_LIBRARY}") #include(${CMAKE_CURRENT_LIST_DIR}/SelectLibraryConfigurations.cmake) include(SelectLibraryConfigurations) SELECT_LIBRARY_CONFIGURATIONS(PYTHON) # SELECT_LIBRARY_CONFIGURATIONS() sets ${PREFIX}_FOUND if it has a library. # Unset this, this prefix doesn't match the module prefix, they are different # for historical reasons. unset(PYTHON_FOUND) #include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake) include(FindPackageHandleStandardArgs) FIND_PACKAGE_HANDLE_STANDARD_ARGS(PythonLibs REQUIRED_VARS PYTHON_LIBRARIES PYTHON_INCLUDE_DIRS VERSION_VAR PYTHONLIBS_VERSION_STRING) # PYTHON_ADD_MODULE( src1 src2 ... srcN) is used to build modules for python. # PYTHON_WRITE_MODULES_HEADER() writes a header file you can include # in your sources to initialize the static python modules function(PYTHON_ADD_MODULE _NAME ) get_property(_TARGET_SUPPORTS_SHARED_LIBS GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS) option(PYTHON_ENABLE_MODULE_${_NAME} "Add module ${_NAME}" TRUE) option(PYTHON_MODULE_${_NAME}_BUILD_SHARED "Add module ${_NAME} shared" ${_TARGET_SUPPORTS_SHARED_LIBS}) # Mark these options as advanced mark_as_advanced(PYTHON_ENABLE_MODULE_${_NAME} PYTHON_MODULE_${_NAME}_BUILD_SHARED) if(PYTHON_ENABLE_MODULE_${_NAME}) if(PYTHON_MODULE_${_NAME}_BUILD_SHARED) set(PY_MODULE_TYPE MODULE) else() set(PY_MODULE_TYPE STATIC) set_property(GLOBAL APPEND PROPERTY PY_STATIC_MODULES_LIST ${_NAME}) endif() set_property(GLOBAL APPEND PROPERTY PY_MODULES_LIST ${_NAME}) add_library(${_NAME} ${PY_MODULE_TYPE} ${ARGN}) # target_link_libraries(${_NAME} ${PYTHON_LIBRARIES}) if(PYTHON_MODULE_${_NAME}_BUILD_SHARED) set_target_properties(${_NAME} PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}") if(WIN32 AND NOT CYGWIN) set_target_properties(${_NAME} PROPERTIES SUFFIX ".pyd") endif() endif() endif() endfunction() function(PYTHON_WRITE_MODULES_HEADER _filename) 
get_property(PY_STATIC_MODULES_LIST GLOBAL PROPERTY PY_STATIC_MODULES_LIST)
# Derive a C-identifier-safe name (and its upper-case include-guard form)
# from the requested header file name.
get_filename_component(_name "${_filename}" NAME)
string(REPLACE "." "_" _name "${_name}")
string(TOUPPER ${_name} _nameUpper)
set(_filename ${CMAKE_CURRENT_BINARY_DIR}/${_filename})
set(_filenameTmp "${_filename}.in")
file(WRITE ${_filenameTmp} "/*Created by cmake, do not edit, changes will be lost*/\n")
# FIX: the emitted prolog was missing the <Python.h> include and the line
# breaks between the preprocessor directives, producing an uncompilable
# header; restore them (as in CMake's stock FindPythonLibs module).
file(APPEND ${_filenameTmp}
"#ifndef ${_nameUpper}\n#define ${_nameUpper}\n\n#include <Python.h>\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif /* __cplusplus */\n\n")
# Forward-declare the init function of every statically linked module.
foreach(_currentModule ${PY_STATIC_MODULES_LIST})
  file(APPEND ${_filenameTmp} "extern void init${PYTHON_MODULE_PREFIX}${_currentModule}(void);\n\n")
endforeach()
file(APPEND ${_filenameTmp}
"#ifdef __cplusplus\n}\n#endif /* __cplusplus */\n\n")
# Emit one registration helper per module that appends it to the inittab.
foreach(_currentModule ${PY_STATIC_MODULES_LIST})
  file(APPEND ${_filenameTmp} "int ${_name}_${_currentModule}(void) \n{\n static char name[]=\"${PYTHON_MODULE_PREFIX}${_currentModule}\"; return PyImport_AppendInittab(name, init${PYTHON_MODULE_PREFIX}${_currentModule});\n}\n\n")
endforeach()
# Emit an aggregate helper that registers all static modules at once.
file(APPEND ${_filenameTmp} "void ${_name}_LoadAllPythonModules(void)\n{\n")
foreach(_currentModule ${PY_STATIC_MODULES_LIST})
  file(APPEND ${_filenameTmp} " ${_name}_${_currentModule}();\n")
endforeach()
file(APPEND ${_filenameTmp} "}\n\n")
file(APPEND ${_filenameTmp} "#ifndef EXCLUDE_LOAD_ALL_FUNCTION\nvoid CMakeLoadAllPythonModules(void)\n{\n ${_name}_LoadAllPythonModules();\n}\n#endif\n\n#endif\n")
# with configure_file() cmake complains that you may not use a file created
# using file(WRITE) as input file for configure_file()
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${_filenameTmp}" "${_filename}" OUTPUT_QUIET ERROR_QUIET)
endfunction()

================================================
FILE: cmake/modules/FindSQLITE3.cmake
================================================
# Copyright (C) 2007-2009 LuaDist.
# Created by Peter Kapec
# Redistribution and use of this file is allowed according to the terms of the MIT license.
# For details see the COPYRIGHT file distributed with LuaDist.
# Note:
# Searching headers and libraries is very simple and is NOT as powerful as scripts
# distributed with CMake, because LuaDist defines directories to search for.
# Everyone is encouraged to contact the author with improvements. Maybe this file
# becomes part of CMake distribution sometimes.

# - Find sqlite3
# Find the native SQLITE3 headers and libraries.
#
# SQLITE3_INCLUDE_DIR - where to find sqlite3.h, etc.
# SQLITE3_LIBRARIES   - List of libraries when using sqlite.
# SQLITE3_FOUND       - True if sqlite found.

if(WIN32)
  # Look for the header file.
  FIND_PATH(SQLITE3_INCLUDE_DIR NAMES sqlite3.h)
  # Look for the library.
  FIND_LIBRARY(SQLITE3_LIBRARY NAMES sqlite3)
else()
  # On non-Windows platforms rely on pkg-config; REQUIRED makes a missing
  # sqlite3 a hard configure error here.
  find_package(PkgConfig REQUIRED)
  pkg_check_modules(SQLITE3 REQUIRED sqlite3)
  message("-- Found sqlite3 library path ${SQLITE3_LIBRARIES}")
  message("-- Found sqlite3 include path ${SQLITE3_INCLUDE_DIRS}")
  set(SQLITE3_LIBRARY ${SQLITE3_LIBRARIES})
  # FIX: copy pkg-config's plural SQLITE3_INCLUDE_DIRS; the previous
  # self-assignment `set(SQLITE3_INCLUDE_DIR ${SQLITE3_INCLUDE_DIR})` left
  # the include dir unset.
  set(SQLITE3_INCLUDE_DIR ${SQLITE3_INCLUDE_DIRS})
  # FIX: a value-less set() actually *unsets* the variable; mark found
  # explicitly (FPHSA below re-derives it from SQLITE3_LIBRARY anyway).
  set(SQLITE3_FOUND TRUE)
endif()

# Handle the QUIETLY and REQUIRED arguments and set SQLITE3_FOUND to TRUE
# if all listed variables are TRUE.
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(SQLITE3 DEFAULT_MSG SQLITE3_LIBRARY) # SQLITE3_INCLUDE_DIR)

# Copy the results to the output variables.
IF(SQLITE3_FOUND)
  SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY})
  SET(SQLITE3_INCLUDE_DIR ${SQLITE3_INCLUDE_DIR})
ELSE(SQLITE3_FOUND)
  SET(SQLITE3_LIBRARIES)
  SET(SQLITE3_INCLUDE_DIR)
ENDIF(SQLITE3_FOUND)

MARK_AS_ADVANCED(SQLITE3_INCLUDE_DIR SQLITE3_LIBRARIES)



================================================
FILE: cmake/modules/FindZeroMQ.cmake
================================================
# - Find zeromq libraries
# This module finds zeromq if it is installed and determines where the
# include files and libraries are.
# It also determines what the name of the library is.
# This code sets the following variables:
#
# ZEROMQ_FOUND            - have the zeromq libs been found
# ZEROMQ_LIBRARIES        - path to the zeromq library
# ZEROMQ_INCLUDE_DIRS     - path to where zmq.h is found
# ZEROMQ_DEBUG_LIBRARIES  - path to the debug library

#INCLUDE(CMakeFindFrameworks)
# Search for the zeromq framework on Apple.
#CMAKE_FIND_FRAMEWORKS(ZeroMQ)

# On Windows, also try to locate a separate debug build of the library.
if(WIN32)
  find_library(ZEROMQ_DEBUG_LIBRARY
    NAMES libzmq_d zmq_d
    PATHS ${ZEROMQ_LIBRARIES}
  )
endif()

# Release (or only) build of the library; NSCP_LIBRARYDIR may be provided
# by the including project as an extra search hint.
find_library(ZEROMQ_LIBRARY
  NAMES libzmq zmq
  PATHS ${ZEROMQ_LIBRARIES} ${NSCP_LIBRARYDIR}
)

# IF(ZeroMQ_FRAMEWORKS AND NOT ZEROMQ_INCLUDE_DIR)
#   FOREACH(dir ${ZeroMQ_FRAMEWORKS})
#     SET(ZEROMQ_FRAMEWORK_INCLUDES ${ZEROMQ_FRAMEWORK_INCLUDES}
#       ${dir}/Versions/${_CURRENT_VERSION}/include/zeromq${_CURRENT_VERSION})
#   ENDFOREACH(dir)
# ENDIF(ZeroMQ_FRAMEWORKS AND NOT ZEROMQ_INCLUDE_DIR)

# Header search: look for the C++ binding header zmq.hpp.
find_path(ZEROMQ_INCLUDE_DIR
  NAMES zmq.hpp
  PATHS
#    ${ZEROMQ_FRAMEWORK_INCLUDES}
    ${ZEROMQ_INCLUDE_DIRS}
    ${NSCP_INCLUDEDIR}
    ${ZEROMQ_INCLUDE_DIR}
)

mark_as_advanced(
  ZEROMQ_DEBUG_LIBRARY
  ZEROMQ_LIBRARY
  ZEROMQ_INCLUDE_DIR
)

# Export the conventional plural-named result variables.
set(ZEROMQ_INCLUDE_DIRS "${ZEROMQ_INCLUDE_DIR}")
set(ZEROMQ_LIBRARIES "${ZEROMQ_LIBRARY}")
set(ZEROMQ_DEBUG_LIBRARIES "${ZEROMQ_DEBUG_LIBRARY}")

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(ZeroMQ DEFAULT_MSG ZEROMQ_LIBRARIES ZEROMQ_INCLUDE_DIRS)



================================================
FILE: cmake/modules/cotire.cmake
================================================
# - cotire (compile time reducer)
#
# See the cotire manual for usage hints.
# #============================================================================= # Copyright 2012-2014 Sascha Kratky # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, # copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following # conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. 
#=============================================================================

if(__COTIRE_INCLUDED)
	return()
endif()
set(__COTIRE_INCLUDED TRUE)

# call cmake_minimum_required, but prevent modification of the CMake policy stack in include mode
# cmake_minimum_required also sets the policy version as a side effect, which we have to avoid
if (NOT CMAKE_SCRIPT_MODE_FILE)
	cmake_policy(PUSH)
endif()
# we need the CMake variables CMAKE_SCRIPT_MODE_FILE and CMAKE_ARGV available since 2.8.5
# we need APPEND_STRING option for set_property available since 2.8.6
cmake_minimum_required(VERSION 2.8.6)
if (NOT CMAKE_SCRIPT_MODE_FILE)
	cmake_policy(POP)
endif()

if (NOT CMAKE_VERSION VERSION_LESS "3.1.0")
	# include TARGET_OBJECTS expressions in a target's SOURCES property
	cmake_policy(SET CMP0051 NEW)
	# only interpret if() arguments as variables or keywords when unquoted
	cmake_policy(SET CMP0054 NEW)
endif()

set (COTIRE_CMAKE_MODULE_FILE "${CMAKE_CURRENT_LIST_FILE}")
set (COTIRE_CMAKE_MODULE_VERSION "1.6.9")

include(CMakeParseArguments)
include(ProcessorCount)

# Determine the compiler version for the given language and cache it in
# <_versionPrefix>_VERSION.
function (cotire_determine_compiler_version _language _versionPrefix)
	if (NOT ${_versionPrefix}_VERSION)
		# use CMake's predefined compiler version variable (available since CMake 2.8.8)
		if (DEFINED CMAKE_${_language}_COMPILER_VERSION)
			set (${_versionPrefix}_VERSION "${CMAKE_${_language}_COMPILER_VERSION}")
		elseif (WIN32)
			# cl.exe messes with the output streams unless the environment variable VS_UNICODE_OUTPUT is cleared
			unset (ENV{VS_UNICODE_OUTPUT})
			string (STRIP "${CMAKE_${_language}_COMPILER_ARG1}" _compilerArg1)
			execute_process (
				COMMAND ${CMAKE_${_language}_COMPILER} ${_compilerArg1}
				ERROR_VARIABLE _versionLine OUTPUT_QUIET TIMEOUT 10)
			string (REGEX REPLACE ".*Version *([0-9]+(\\.[0-9]+)*).*" "\\1" ${_versionPrefix}_VERSION "${_versionLine}")
		else()
			# assume GCC like command line interface
			string (STRIP "${CMAKE_${_language}_COMPILER_ARG1}" _compilerArg1)
			execute_process (
				COMMAND ${CMAKE_${_language}_COMPILER} ${_compilerArg1} "-dumpversion"
				OUTPUT_VARIABLE ${_versionPrefix}_VERSION
				RESULT_VARIABLE _result
				OUTPUT_STRIP_TRAILING_WHITESPACE TIMEOUT 10)
			if (_result)
				set (${_versionPrefix}_VERSION "")
			endif()
		endif()
		if (${_versionPrefix}_VERSION)
			set (${_versionPrefix}_VERSION "${${_versionPrefix}_VERSION}" CACHE INTERNAL "${_language} compiler version")
		endif()
		set (${_versionPrefix}_VERSION "${${_versionPrefix}_VERSION}" PARENT_SCOPE)
		if (COTIRE_DEBUG)
			message (STATUS "${CMAKE_${_language}_COMPILER} version ${${_versionPrefix}_VERSION}")
		endif()
	endif()
endfunction()

# Collect the list of build configurations (multi-config types plus
# CMAKE_BUILD_TYPE), defaulting to "None" when nothing is set.
function (cotire_get_configuration_types _configsVar)
	set (_configs "")
	if (CMAKE_CONFIGURATION_TYPES)
		list (APPEND _configs ${CMAKE_CONFIGURATION_TYPES})
	endif()
	if (CMAKE_BUILD_TYPE)
		list (APPEND _configs "${CMAKE_BUILD_TYPE}")
	endif()
	if (_configs)
		list (REMOVE_DUPLICATES _configs)
		set (${_configsVar} ${_configs} PARENT_SCOPE)
	else()
		set (${_configsVar} "None" PARENT_SCOPE)
	endif()
endfunction()

function (cotire_get_source_file_extension _sourceFile _extVar)
	# get_filename_component returns extension from first occurrence of . in file name
	# this function computes the extension from last occurrence of . in file name
	string (FIND "${_sourceFile}" "." _index REVERSE)
	if (_index GREATER -1)
		math (EXPR _index "${_index} + 1")
		string (SUBSTRING "${_sourceFile}" ${_index} -1 _sourceExt)
	else()
		set (_sourceExt "")
	endif()
	set (${_extVar} "${_sourceExt}" PARENT_SCOPE)
endfunction()

# Set _isRelativeVar TRUE when _path lies under one of the directories in ARGN.
macro (cotire_check_is_path_relative_to _path _isRelativeVar)
	set (${_isRelativeVar} FALSE)
	if (IS_ABSOLUTE "${_path}")
		foreach (_dir ${ARGN})
			file (RELATIVE_PATH _relPath "${_dir}" "${_path}")
			if (NOT _relPath OR (NOT IS_ABSOLUTE "${_relPath}" AND NOT "${_relPath}" MATCHES "^\\.\\."))
				set (${_isRelativeVar} TRUE)
				break()
			endif()
		endforeach()
	endif()
endmacro()

# Partition the source files in ARGN into files of the given language,
# files excluded from the unity/PCH build, and files already cotired.
function (cotire_filter_language_source_files _language _sourceFilesVar _excludedSourceFilesVar _cotiredSourceFilesVar)
	set (_sourceFiles "")
	set (_excludedSourceFiles "")
	set (_cotiredSourceFiles "")
	if (CMAKE_${_language}_SOURCE_FILE_EXTENSIONS)
		set (_languageExtensions "${CMAKE_${_language}_SOURCE_FILE_EXTENSIONS}")
	else()
		set (_languageExtensions "")
	endif()
	if (CMAKE_${_language}_IGNORE_EXTENSIONS)
		set (_ignoreExtensions "${CMAKE_${_language}_IGNORE_EXTENSIONS}")
	else()
		set (_ignoreExtensions "")
	endif()
	if (COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS)
		set (_excludeExtensions "${COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS}")
	else()
		set (_excludeExtensions "")
	endif()
	if (COTIRE_DEBUG)
		message (STATUS "${_language} source file extensions: ${_languageExtensions}")
		message (STATUS "${_language} ignore extensions: ${_ignoreExtensions}")
		message (STATUS "${_language} exclude extensions: ${_excludeExtensions}")
	endif()
	if (CMAKE_VERSION VERSION_LESS "3.1.0")
		set (_allSourceFiles ${ARGN})
	else()
		# as of CMake 3.1 target sources may contain generator expressions
		# since we cannot obtain required property information about source files added
		# through generator expressions at configure time, we filter them out
		string (GENEX_STRIP "${ARGN}" _allSourceFiles)
	endif()
	foreach (_sourceFile ${_allSourceFiles})
		get_source_file_property(_sourceIsHeaderOnly "${_sourceFile}" HEADER_FILE_ONLY)
		get_source_file_property(_sourceIsExternal "${_sourceFile}" EXTERNAL_OBJECT)
		get_source_file_property(_sourceIsSymbolic "${_sourceFile}" SYMBOLIC)
		get_source_file_property(_sourceLanguage "${_sourceFile}" LANGUAGE)
		set (_sourceIsFiltered FALSE)
		if (NOT _sourceIsHeaderOnly AND NOT _sourceIsExternal AND NOT _sourceIsSymbolic)
			cotire_get_source_file_extension("${_sourceFile}" _sourceExt)
			if (_sourceExt)
				list (FIND _ignoreExtensions "${_sourceExt}" _ignoreIndex)
				if (_ignoreIndex LESS 0)
					list (FIND _excludeExtensions "${_sourceExt}" _excludeIndex)
					if (_excludeIndex GREATER -1)
						list (APPEND _excludedSourceFiles "${_sourceFile}")
					else()
						list (FIND _languageExtensions "${_sourceExt}" _sourceIndex)
						if (_sourceIndex GREATER -1)
							set (_sourceIsFiltered TRUE)
						elseif ("${_sourceLanguage}" STREQUAL "${_language}")
							# add to excluded sources, if file is not ignored and has correct language without having the correct extension
							list (APPEND _excludedSourceFiles "${_sourceFile}")
						endif()
					endif()
				endif()
			endif()
		endif()
		if (COTIRE_DEBUG)
			message (STATUS "${_sourceFile} filtered=${_sourceIsFiltered} language=${_sourceLanguage} header=${_sourceIsHeaderOnly}")
		endif()
		if (_sourceIsFiltered)
			get_source_file_property(_sourceIsExcluded "${_sourceFile}" COTIRE_EXCLUDED)
			get_source_file_property(_sourceIsCotired "${_sourceFile}" COTIRE_TARGET)
			get_source_file_property(_sourceCompileFlags "${_sourceFile}" COMPILE_FLAGS)
			if (COTIRE_DEBUG)
				message (STATUS "${_sourceFile} excluded=${_sourceIsExcluded} cotired=${_sourceIsCotired} compileFlags=${_sourceCompileFlags}")
			endif()
			if (_sourceIsCotired)
				list (APPEND _cotiredSourceFiles "${_sourceFile}")
			elseif (_sourceIsExcluded OR _sourceCompileFlags)
				list (APPEND _excludedSourceFiles "${_sourceFile}")
			else()
				list (APPEND _sourceFiles "${_sourceFile}")
			endif()
		endif()
	endforeach()
	if (COTIRE_DEBUG)
		message (STATUS "All: ${ARGN}")
		message (STATUS "${_language}: ${_sourceFiles}")
		message (STATUS "Excluded: ${_excludedSourceFiles}")
		message (STATUS "Cotired: ${_cotiredSourceFiles}")
	endif()
	set (${_sourceFilesVar} ${_sourceFiles} PARENT_SCOPE)
	set (${_excludedSourceFilesVar} ${_excludedSourceFiles} PARENT_SCOPE)
	set (${_cotiredSourceFilesVar} ${_cotiredSourceFiles} PARENT_SCOPE)
endfunction()

# Return the objects of the given type in ARGN whose property is set and truthy.
function (cotire_get_objects_with_property_on _filteredObjectsVar _property _type)
	set (_filteredObjects "")
	foreach (_object ${ARGN})
		get_property(_isSet ${_type} "${_object}" PROPERTY ${_property} SET)
		if (_isSet)
			get_property(_propertyValue ${_type} "${_object}" PROPERTY ${_property})
			if (_propertyValue)
				list (APPEND _filteredObjects "${_object}")
			endif()
		endif()
	endforeach()
	set (${_filteredObjectsVar} ${_filteredObjects} PARENT_SCOPE)
endfunction()

# Return the objects of the given type in ARGN whose property is set but falsy.
function (cotire_get_objects_with_property_off _filteredObjectsVar _property _type)
	set (_filteredObjects "")
	foreach (_object ${ARGN})
		get_property(_isSet ${_type} "${_object}" PROPERTY ${_property} SET)
		if (_isSet)
			get_property(_propertyValue ${_type} "${_object}" PROPERTY ${_property})
			if (NOT _propertyValue)
				list (APPEND _filteredObjects "${_object}")
			endif()
		endif()
	endforeach()
	set (${_filteredObjectsVar} ${_filteredObjects} PARENT_SCOPE)
endfunction()

# Gather the non-empty values of the given source file property over ARGN.
function (cotire_get_source_file_property_values _valuesVar _property)
	set (_values "")
	foreach (_sourceFile ${ARGN})
		get_source_file_property(_propertyValue "${_sourceFile}" ${_property})
		if (_propertyValue)
			list (APPEND _values "${_propertyValue}")
		endif()
	endforeach()
	set (${_valuesVar} ${_values} PARENT_SCOPE)
endfunction()

# Expand property names containing the <CONFIG> placeholder into one
# per-configuration property name each.
function (cotire_resolve_config_properites _configurations _propertiesVar)
	set (_properties "")
	foreach (_property ${ARGN})
		# FIX: the <CONFIG> placeholder had been lost; an empty MATCHES/REPLACE
		# pattern matched everything and made string(REPLACE) fail.
		if ("${_property}" MATCHES "<CONFIG>")
			foreach (_config ${_configurations})
				string (TOUPPER "${_config}" _upperConfig)
				string (REPLACE "<CONFIG>" "${_upperConfig}" _configProperty "${_property}")
				list (APPEND _properties ${_configProperty})
			endforeach()
		else()
			list (APPEND _properties ${_property})
		endif()
	endforeach()
	set (${_propertiesVar} ${_properties} PARENT_SCOPE)
endfunction()
# Copy all set properties in ARGN (with <CONFIG> expansion) from _source to _target.
function (cotire_copy_set_properites _configurations _type _source _target)
	cotire_resolve_config_properites("${_configurations}" _properties ${ARGN})
	foreach (_property ${_properties})
		get_property(_isSet ${_type} ${_source} PROPERTY ${_property} SET)
		if (_isSet)
			get_property(_propertyValue ${_type} ${_source} PROPERTY ${_property})
			set_property(${_type} ${_target} PROPERTY ${_property} "${_propertyValue}")
		endif()
	endforeach()
endfunction()

# Breadth-first walk of LINK_LIBRARIES plus transitive INTERFACE_LINK_LIBRARIES.
function (cotire_get_target_link_libraries_for_usage_requirements _target _targetLinkLibrariesVar)
	set (_targetLinkLibraries "")
	get_target_property(_librariesToProcess ${_target} LINK_LIBRARIES)
	while (_librariesToProcess)
		# remove from head
		list (GET _librariesToProcess 0 _library)
		list (REMOVE_AT _librariesToProcess 0)
		list (FIND _targetLinkLibraries ${_library} _index)
		if (_index LESS 0)
			list (APPEND _targetLinkLibraries ${_library})
			# process transitive libraries
			if (TARGET ${_library})
				get_target_property(_libraries ${_library} INTERFACE_LINK_LIBRARIES)
				if (_libraries)
					list (APPEND _librariesToProcess ${_libraries})
				endif()
			endif()
		endif()
	endwhile()
	set (${_targetLinkLibrariesVar} ${_targetLinkLibraries} PARENT_SCOPE)
endfunction()

# Split compile flags in ARGN into options matching _flagFilter (e.g. "I", "D")
# and the remaining unmatched options.
function (cotire_filter_compile_flags _language _flagFilter _matchedOptionsVar _unmatchedOptionsVar)
	if (WIN32 AND CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel")
		set (_flagPrefix "[/-]")
	else()
		set (_flagPrefix "--?")
	endif()
	set (_optionFlag "")
	set (_matchedOptions "")
	set (_unmatchedOptions "")
	foreach (_compileFlag ${ARGN})
		if (_compileFlag)
			if (_optionFlag AND NOT "${_compileFlag}" MATCHES "^${_flagPrefix}")
				# option with separate argument
				list (APPEND _matchedOptions "${_compileFlag}")
				set (_optionFlag "")
			elseif ("${_compileFlag}" MATCHES "^(${_flagPrefix})(${_flagFilter})$")
				# remember option
				set (_optionFlag "${CMAKE_MATCH_2}")
			elseif ("${_compileFlag}" MATCHES "^(${_flagPrefix})(${_flagFilter})(.+)$")
				# option with joined argument
				list (APPEND _matchedOptions "${CMAKE_MATCH_3}")
				set (_optionFlag "")
			else()
				# flush remembered option
				if (_optionFlag)
					list (APPEND _matchedOptions "${_optionFlag}")
					set (_optionFlag "")
				endif()
				# add to unfiltered options
				list (APPEND _unmatchedOptions "${_compileFlag}")
			endif()
		endif()
	endforeach()
	if (_optionFlag)
		list (APPEND _matchedOptions "${_optionFlag}")
	endif()
	if (COTIRE_DEBUG)
		message (STATUS "Filter ${_flagFilter}")
		if (_matchedOptions)
			message (STATUS "Matched ${_matchedOptions}")
		endif()
		if (_unmatchedOptions)
			message (STATUS "Unmatched ${_unmatchedOptions}")
		endif()
	endif()
	set (${_matchedOptionsVar} ${_matchedOptions} PARENT_SCOPE)
	set (${_unmatchedOptionsVar} ${_unmatchedOptions} PARENT_SCOPE)
endfunction()

# Assemble the full list of compile flags for a target from language, config,
# directory, target and interface properties.
function (cotire_get_target_compile_flags _config _language _directory _target _flagsVar)
	string (TOUPPER "${_config}" _upperConfig)
	# collect options from CMake language variables
	set (_compileFlags "")
	if (CMAKE_${_language}_FLAGS)
		set (_compileFlags "${_compileFlags} ${CMAKE_${_language}_FLAGS}")
	endif()
	if (CMAKE_${_language}_FLAGS_${_upperConfig})
		set (_compileFlags "${_compileFlags} ${CMAKE_${_language}_FLAGS_${_upperConfig}}")
	endif()
	if (_target)
		# add option from CMake target type variable
		get_target_property(_targetType ${_target} TYPE)
		if (POLICY CMP0018)
			# handle POSITION_INDEPENDENT_CODE property introduced with CMake 2.8.9 if policy CMP0018 is turned on
			cmake_policy(GET CMP0018 _PIC_Policy)
		else()
			# default to old behavior
			set (_PIC_Policy "OLD")
		endif()
		if (COTIRE_DEBUG)
			message(STATUS "CMP0018=${_PIC_Policy}")
		endif()
		if (_PIC_Policy STREQUAL "NEW")
			# NEW behavior: honor the POSITION_INDEPENDENT_CODE target property
			get_target_property(_targetPIC ${_target} POSITION_INDEPENDENT_CODE)
			if (_targetPIC)
				if (_targetType STREQUAL "EXECUTABLE" AND CMAKE_${_language}_COMPILE_OPTIONS_PIE)
					set (_compileFlags "${_compileFlags} ${CMAKE_${_language}_COMPILE_OPTIONS_PIE}")
				elseif (CMAKE_${_language}_COMPILE_OPTIONS_PIC)
					set (_compileFlags "${_compileFlags} ${CMAKE_${_language}_COMPILE_OPTIONS_PIC}")
				endif()
			endif()
		else()
			# OLD behavior or policy not set: use the value of CMAKE_SHARED_LIBRARY_<LANG>_FLAGS
			if (_targetType STREQUAL "MODULE_LIBRARY")
				# flags variable for module library uses different name SHARED_MODULE
				# (e.g., CMAKE_SHARED_MODULE_C_FLAGS)
				set (_targetType SHARED_MODULE)
			endif()
			if (CMAKE_${_targetType}_${_language}_FLAGS)
				set (_compileFlags "${_compileFlags} ${CMAKE_${_targetType}_${_language}_FLAGS}")
			endif()
		endif()
	endif()
	if (_directory)
		# add_definitions may have been used to add flags to the compiler command
		get_directory_property(_dirDefinitions DIRECTORY "${_directory}" DEFINITIONS)
		if (_dirDefinitions)
			set (_compileFlags "${_compileFlags} ${_dirDefinitions}")
		endif()
	endif()
	if (_target)
		# add target compile options
		get_target_property(_targetflags ${_target} COMPILE_FLAGS)
		if (_targetflags)
			set (_compileFlags "${_compileFlags} ${_targetflags}")
		endif()
		get_target_property(_targetOptions ${_target} COMPILE_OPTIONS)
		if (_targetOptions)
			set (_compileFlags "${_compileFlags} ${_targetOptions}")
		endif()
		# interface compile options from linked library targets
		cotire_get_target_link_libraries_for_usage_requirements(${_target} _linkLibraries)
		foreach (_library ${_linkLibraries})
			if (TARGET ${_library})
				get_target_property(_targetOptions ${_library} INTERFACE_COMPILE_OPTIONS)
				if (_targetOptions)
					set (_compileFlags "${_compileFlags} ${_targetOptions}")
				endif()
			endif()
		endforeach()
	endif()
	if (UNIX)
		separate_arguments(_compileFlags UNIX_COMMAND "${_compileFlags}")
	elseif(WIN32)
		separate_arguments(_compileFlags WINDOWS_COMMAND "${_compileFlags}")
	else()
		separate_arguments(_compileFlags)
	endif()
	# platform specific flags
	if (APPLE)
		get_target_property(_architectures ${_target} OSX_ARCHITECTURES_${_upperConfig})
		if (NOT _architectures)
			get_target_property(_architectures ${_target} OSX_ARCHITECTURES)
		endif()
		if (_architectures)
			foreach (_arch ${_architectures})
				list (APPEND _compileFlags "-arch" "${_arch}")
			endforeach()
		endif()
		if (CMAKE_OSX_SYSROOT)
			if (CMAKE_${_language}_SYSROOT_FLAG)
				list (APPEND _compileFlags "${CMAKE_${_language}_SYSROOT_FLAG}" "${CMAKE_OSX_SYSROOT}")
			else()
				list (APPEND _compileFlags "-isysroot" "${CMAKE_OSX_SYSROOT}")
			endif()
		endif()
		if (CMAKE_OSX_DEPLOYMENT_TARGET)
			if (CMAKE_${_language}_OSX_DEPLOYMENT_TARGET_FLAG)
				list (APPEND _compileFlags "${CMAKE_${_language}_OSX_DEPLOYMENT_TARGET_FLAG}${CMAKE_OSX_DEPLOYMENT_TARGET}")
			else()
				list (APPEND _compileFlags "-mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET}")
			endif()
		endif()
	endif()
	if (COTIRE_DEBUG AND _compileFlags)
		message (STATUS "Target ${_target} compile flags ${_compileFlags}")
	endif()
	set (${_flagsVar} ${_compileFlags} PARENT_SCOPE)
endfunction()

# Compute the ordinary and system include directories in effect for the target.
function (cotire_get_target_include_directories _config _language _targetSourceDir _targetBinaryDir _target _includeDirsVar _systemIncludeDirsVar)
	set (_includeDirs "")
	set (_systemIncludeDirs "")
	# default include dirs
	if (CMAKE_INCLUDE_CURRENT_DIR)
		list (APPEND _includeDirs "${_targetBinaryDir}")
		list (APPEND _includeDirs "${_targetSourceDir}")
	endif()
	# parse additional include directories from target compile flags
	set (_targetFlags "")
	cotire_get_target_compile_flags("${_config}" "${_language}" "${_targetSourceDir}" "${_target}" _targetFlags)
	cotire_filter_compile_flags("${_language}" "I" _dirs _ignore ${_targetFlags})
	if (_dirs)
		list (APPEND _includeDirs ${_dirs})
	endif()
	# target include directories
	get_directory_property(_dirs DIRECTORY "${_targetSourceDir}" INCLUDE_DIRECTORIES)
	if (_target)
		get_target_property(_targetDirs ${_target} INCLUDE_DIRECTORIES)
		if (_targetDirs)
			list (APPEND _dirs ${_targetDirs})
		endif()
		get_target_property(_targetDirs ${_target} INTERFACE_SYSTEM_INCLUDE_DIRECTORIES)
		if (_targetDirs)
			list (APPEND _systemIncludeDirs ${_targetDirs})
		endif()
		# interface include directories from linked library targets
		cotire_get_target_link_libraries_for_usage_requirements(${_target} _linkLibraries)
		foreach (_library ${_linkLibraries})
			if (TARGET ${_library})
				get_target_property(_targetDirs ${_library} INTERFACE_INCLUDE_DIRECTORIES)
				if (_targetDirs)
					list (APPEND _dirs ${_targetDirs})
				endif()
				get_target_property(_targetDirs ${_library} INTERFACE_SYSTEM_INCLUDE_DIRECTORIES)
				if (_targetDirs)
					list (APPEND _systemIncludeDirs ${_targetDirs})
				endif()
			endif()
		endforeach()
	endif()
	# FIX: was `if (dirs)` (missing underscore), so the deduplication never ran.
	if (_dirs)
		list (REMOVE_DUPLICATES _dirs)
	endif()
	list (LENGTH _includeDirs _projectInsertIndex)
	foreach (_dir ${_dirs})
		if (CMAKE_INCLUDE_DIRECTORIES_PROJECT_BEFORE)
			cotire_check_is_path_relative_to("${_dir}" _isRelative "${CMAKE_SOURCE_DIR}" "${CMAKE_BINARY_DIR}")
			if (_isRelative)
				list (LENGTH _includeDirs _len)
				if (_len EQUAL _projectInsertIndex)
					list (APPEND _includeDirs "${_dir}")
				else()
					list (INSERT _includeDirs _projectInsertIndex "${_dir}")
				endif()
				math (EXPR _projectInsertIndex "${_projectInsertIndex} + 1")
			else()
				list (APPEND _includeDirs "${_dir}")
			endif()
		else()
			list (APPEND _includeDirs "${_dir}")
		endif()
	endforeach()
	list (REMOVE_DUPLICATES _includeDirs)
	list (REMOVE_DUPLICATES _systemIncludeDirs)
	if (CMAKE_${_language}_IMPLICIT_INCLUDE_DIRECTORIES)
		list (REMOVE_ITEM _includeDirs ${CMAKE_${_language}_IMPLICIT_INCLUDE_DIRECTORIES})
	endif()
	if (COTIRE_DEBUG AND _includeDirs)
		message (STATUS "Target ${_target} include dirs ${_includeDirs}")
	endif()
	set (${_includeDirsVar} ${_includeDirs} PARENT_SCOPE)
	if (COTIRE_DEBUG AND _systemIncludeDirs)
		message (STATUS "Target ${_target} system include dirs ${_systemIncludeDirs}")
	endif()
	set (${_systemIncludeDirsVar} ${_systemIncludeDirs} PARENT_SCOPE)
endfunction()

# Turn an arbitrary string into a valid C identifier.
macro (cotire_make_C_identifier _identifierVar _str)
	if (CMAKE_VERSION VERSION_LESS "2.8.12")
		# mimic CMake SystemTools::MakeCindentifier behavior
		if ("${_str}" MATCHES "^[0-9].+$")
			# FIX: was "_${str}" (wrong variable name), which prepended nothing.
			set (_str "_${_str}")
		endif()
		string (REGEX REPLACE "[^a-zA-Z0-9]" "_" ${_identifierVar} "${_str}")
	else()
		string (MAKE_C_IDENTIFIER "${_str}" "${_identifierVar}")
	endif()
endmacro()

function
(cotire_get_target_export_symbol _target _exportSymbolVar)
	# Derive the export/define symbol (e.g. <target>_EXPORTS) for shared/module
	# libraries and export-enabled executables; empty otherwise.
	set (_exportSymbol "")
	get_target_property(_targetType ${_target} TYPE)
	get_target_property(_enableExports ${_target} ENABLE_EXPORTS)
	if (_targetType MATCHES "(SHARED|MODULE)_LIBRARY" OR
		(_targetType STREQUAL "EXECUTABLE" AND _enableExports))
		get_target_property(_exportSymbol ${_target} DEFINE_SYMBOL)
		if (NOT _exportSymbol)
			set (_exportSymbol "${_target}_EXPORTS")
		endif()
		cotire_make_C_identifier(_exportSymbol "${_exportSymbol}")
	endif()
	set (${_exportSymbolVar} ${_exportSymbol} PARENT_SCOPE)
endfunction()

# Collect all compile definitions in effect for the target in the given config.
function (cotire_get_target_compile_definitions _config _language _directory _target _definitionsVar)
	string (TOUPPER "${_config}" _upperConfig)
	set (_configDefinitions "")
	# CMAKE_INTDIR for multi-configuration build systems
	if (NOT "${CMAKE_CFG_INTDIR}" STREQUAL ".")
		list (APPEND _configDefinitions "CMAKE_INTDIR=\"${_config}\"")
	endif()
	# target export define symbol
	cotire_get_target_export_symbol("${_target}" _defineSymbol)
	if (_defineSymbol)
		list (APPEND _configDefinitions "${_defineSymbol}")
	endif()
	# directory compile definitions
	get_directory_property(_definitions DIRECTORY "${_directory}" COMPILE_DEFINITIONS)
	if (_definitions)
		list (APPEND _configDefinitions ${_definitions})
	endif()
	get_directory_property(_definitions DIRECTORY "${_directory}" COMPILE_DEFINITIONS_${_upperConfig})
	if (_definitions)
		list (APPEND _configDefinitions ${_definitions})
	endif()
	# target compile definitions
	get_target_property(_definitions ${_target} COMPILE_DEFINITIONS)
	if (_definitions)
		list (APPEND _configDefinitions ${_definitions})
	endif()
	get_target_property(_definitions ${_target} COMPILE_DEFINITIONS_${_upperConfig})
	if (_definitions)
		list (APPEND _configDefinitions ${_definitions})
	endif()
	# interface compile definitions from linked library targets
	cotire_get_target_link_libraries_for_usage_requirements(${_target} _linkLibraries)
	foreach (_library ${_linkLibraries})
		if (TARGET ${_library})
			get_target_property(_definitions ${_library} INTERFACE_COMPILE_DEFINITIONS)
			if (_definitions)
				list (APPEND _configDefinitions ${_definitions})
			endif()
		endif()
	endforeach()
	# parse additional compile definitions from target compile flags
	# and don't look at directory compile definitions, which we already handled
	set (_targetFlags "")
	cotire_get_target_compile_flags("${_config}" "${_language}" "" "${_target}" _targetFlags)
	cotire_filter_compile_flags("${_language}" "D" _definitions _ignore ${_targetFlags})
	if (_definitions)
		list (APPEND _configDefinitions ${_definitions})
	endif()
	list (REMOVE_DUPLICATES _configDefinitions)
	if (COTIRE_DEBUG AND _configDefinitions)
		message (STATUS "Target ${_target} compile definitions ${_configDefinitions}")
	endif()
	set (${_definitionsVar} ${_configDefinitions} PARENT_SCOPE)
endfunction()

# Compile flags for the target with -I/-D options stripped out.
function (cotire_get_target_compiler_flags _config _language _directory _target _compilerFlagsVar)
	# parse target compile flags omitting compile definitions and include directives
	set (_targetFlags "")
	cotire_get_target_compile_flags("${_config}" "${_language}" "${_directory}" "${_target}" _targetFlags)
	set (_compilerFlags "")
	cotire_filter_compile_flags("${_language}" "[ID]" _ignore _compilerFlags ${_targetFlags})
	if (COTIRE_DEBUG AND _compilerFlags)
		message (STATUS "Target ${_target} compiler flags ${_compilerFlags}")
	endif()
	set (${_compilerFlagsVar} ${_compilerFlags} PARENT_SCOPE)
endfunction()

# On Apple, append sysroot-prefixed variants of existing absolute paths.
# NOTE(review): `_language` is not a parameter here; it leaks in from the
# caller's scope — kept as-is to preserve upstream behavior.
function (cotire_add_sys_root_paths _pathsVar)
	if (APPLE)
		if (CMAKE_OSX_SYSROOT AND CMAKE_${_language}_HAS_ISYSROOT)
			foreach (_path IN LISTS ${_pathsVar})
				if (IS_ABSOLUTE "${_path}")
					get_filename_component(_path "${CMAKE_OSX_SYSROOT}/${_path}" ABSOLUTE)
					if (EXISTS "${_path}")
						list (APPEND ${_pathsVar} "${_path}")
					endif()
				endif()
			endforeach()
		endif()
	endif()
	set (${_pathsVar} ${${_pathsVar}} PARENT_SCOPE)
	if (COTIRE_DEBUG)
		message (STATUS "${_pathsVar}=${${_pathsVar}}")
	endif()
endfunction()

# Extract the run of pattern-matching values following _sourceFile in ARGN.
function (cotire_get_source_extra_properties _sourceFile _pattern _resultVar)
	set (_extraProperties ${ARGN})
	set (_result "")
	if (_extraProperties)
		list (FIND _extraProperties "${_sourceFile}" _index)
		if (_index GREATER -1)
			math (EXPR _index "${_index} + 1")
			list (LENGTH _extraProperties _len)
			math (EXPR _len "${_len} - 1")
			foreach (_index RANGE ${_index} ${_len})
				list (GET _extraProperties ${_index} _value)
				if (_value MATCHES "${_pattern}")
					list (APPEND _result "${_value}")
				else()
					break()
				endif()
			endforeach()
		endif()
	endif()
	set (${_resultVar} ${_result} PARENT_SCOPE)
endfunction()

# Per-source compile definitions (source properties plus extra properties).
function (cotire_get_source_compile_definitions _config _language _sourceFile _definitionsVar)
	set (_compileDefinitions "")
	if (NOT CMAKE_SCRIPT_MODE_FILE)
		string (TOUPPER "${_config}" _upperConfig)
		get_source_file_property(_definitions "${_sourceFile}" COMPILE_DEFINITIONS)
		if (_definitions)
			list (APPEND _compileDefinitions ${_definitions})
		endif()
		get_source_file_property(_definitions "${_sourceFile}" COMPILE_DEFINITIONS_${_upperConfig})
		if (_definitions)
			list (APPEND _compileDefinitions ${_definitions})
		endif()
	endif()
	cotire_get_source_extra_properties("${_sourceFile}" "^[a-zA-Z0-9_]+(=.*)?$" _definitions ${ARGN})
	if (_definitions)
		list (APPEND _compileDefinitions ${_definitions})
	endif()
	if (COTIRE_DEBUG AND _compileDefinitions)
		message (STATUS "Source ${_sourceFile} compile definitions ${_compileDefinitions}")
	endif()
	set (${_definitionsVar} ${_compileDefinitions} PARENT_SCOPE)
endfunction()

# Flatten per-source definitions into "<file> <defs...> -" records.
function (cotire_get_source_files_compile_definitions _config _language _definitionsVar)
	set (_configDefinitions "")
	foreach (_sourceFile ${ARGN})
		cotire_get_source_compile_definitions("${_config}" "${_language}" "${_sourceFile}" _sourceDefinitions)
		if (_sourceDefinitions)
			list (APPEND _configDefinitions "${_sourceFile}" ${_sourceDefinitions} "-")
		endif()
	endforeach()
	set (${_definitionsVar} ${_configDefinitions} PARENT_SCOPE)
endfunction()

# Per-source undefine list read from the given source property.
function (cotire_get_source_undefs _sourceFile _property _sourceUndefsVar)
	set (_sourceUndefs "")
	if (NOT CMAKE_SCRIPT_MODE_FILE)
		get_source_file_property(_undefs "${_sourceFile}" ${_property})
		if (_undefs)
			list (APPEND _sourceUndefs ${_undefs})
		endif()
	endif()
	cotire_get_source_extra_properties("${_sourceFile}" "^[a-zA-Z0-9_]+$" _undefs ${ARGN})
	if (_undefs)
		list (APPEND _sourceUndefs ${_undefs})
	endif()
	if (COTIRE_DEBUG AND _sourceUndefs)
		message (STATUS "Source ${_sourceFile} ${_property} undefs ${_sourceUndefs}")
	endif()
	set (${_sourceUndefsVar} ${_sourceUndefs} PARENT_SCOPE)
endfunction()

# Flatten per-source undefs into "<file> <undefs...> -" records.
function (cotire_get_source_files_undefs _property _sourceUndefsVar)
	set (_sourceUndefs "")
	foreach (_sourceFile ${ARGN})
		cotire_get_source_undefs("${_sourceFile}" ${_property} _undefs)
		if (_undefs)
			list (APPEND _sourceUndefs "${_sourceFile}" ${_undefs} "-")
		endif()
	endforeach()
	set (${_sourceUndefsVar} ${_sourceUndefs} PARENT_SCOPE)
endfunction()

# Start a cmake -P invocation command line in script mode.
macro (cotire_set_cmd_to_prologue _cmdVar)
	set (${_cmdVar} "${CMAKE_COMMAND}")
	if (COTIRE_DEBUG)
		list (APPEND ${_cmdVar} "--warn-uninitialized")
	endif()
	# FIX: the generator expression had been truncated to a bare "$";
	# upstream passes the active configuration via $<CONFIGURATION>.
	list (APPEND ${_cmdVar} "-DCOTIRE_BUILD_TYPE:STRING=$<CONFIGURATION>")
	if (COTIRE_VERBOSE)
		list (APPEND ${_cmdVar} "-DCOTIRE_VERBOSE:BOOL=ON")
	elseif("${CMAKE_GENERATOR}" MATCHES "Makefiles")
		list (APPEND ${_cmdVar} "-DCOTIRE_VERBOSE:BOOL=$(VERBOSE)")
	endif()
endmacro()

# Initialize a compiler command line (compiler executable plus ARG1).
function (cotire_init_compile_cmd _cmdVar _language _compilerExe _compilerArg1)
	if (NOT _compilerExe)
		set (_compilerExe "${CMAKE_${_language}_COMPILER}")
	endif()
	if (NOT _compilerArg1)
		set (_compilerArg1 ${CMAKE_${_language}_COMPILER_ARG1})
	endif()
	string (STRIP "${_compilerArg1}" _compilerArg1)
	set (${_cmdVar} "${_compilerExe}" ${_compilerArg1} PARENT_SCOPE)
endfunction()

# Append -D//D defines to the command line, per compiler convention.
macro (cotire_add_definitions_to_cmd _cmdVar _language)
	foreach (_definition ${ARGN})
		if (WIN32 AND CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel")
			list (APPEND ${_cmdVar} "/D${_definition}")
		else()
			list (APPEND ${_cmdVar} "-D${_definition}")
		endif()
	endforeach()
endmacro()

# Append include directories, using the system-include flag where applicable.
macro (cotire_add_includes_to_cmd _cmdVar _language _includeSystemFlag _includesVar _systemIncludesVar)
	foreach (_include ${${_includesVar}})
		if (WIN32 AND CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel")
			file (TO_NATIVE_PATH "${_include}" _include)
			list (APPEND ${_cmdVar} "/I${_include}")
		else()
			list (FIND ${_systemIncludesVar} ${_include} _index)
			if(_index GREATER -1 AND NOT "${_includeSystemFlag}" STREQUAL "")
				list (APPEND ${_cmdVar} "${_includeSystemFlag}${_include}")
			else()
				list (APPEND ${_cmdVar} "-I${_include}")
			endif()
		endif()
	endforeach()
endmacro()

# On Apple, add -F options for any .framework directories among ARGN.
macro (cotire_add_frameworks_to_cmd _cmdVar _language)
	if (APPLE)
		set (_frameWorkDirs "")
		foreach (_include ${ARGN})
			if (IS_ABSOLUTE "${_include}" AND _include MATCHES "\\.framework$")
				get_filename_component(_frameWorkDir "${_include}" PATH)
				list (APPEND _frameWorkDirs "${_frameWorkDir}")
			endif()
		endforeach()
		if (_frameWorkDirs)
			list (REMOVE_DUPLICATES _frameWorkDirs)
			foreach (_frameWorkDir ${_frameWorkDirs})
				list (APPEND ${_cmdVar} "-F${_frameWorkDir}")
			endforeach()
		endif()
	endif()
endmacro()

macro (cotire_add_compile_flags_to_cmd _cmdVar)
	foreach (_flag ${ARGN})
		list (APPEND ${_cmdVar} "${_flag}")
	endforeach()
endmacro()

# TRUE when _file exists and is newer than all dependency files in ARGN.
function (cotire_check_file_up_to_date _fileIsUpToDateVar _file)
	set (${_fileIsUpToDateVar} FALSE PARENT_SCOPE)
	set (_triggerFile "")
	foreach (_dependencyFile ${ARGN})
		if (EXISTS "${_dependencyFile}" AND "${_dependencyFile}" IS_NEWER_THAN "${_file}")
			set (_triggerFile "${_dependencyFile}")
			break()
		endif()
	endforeach()
	get_filename_component(_fileName "${_file}" NAME)
	if (EXISTS "${_file}")
		if (_triggerFile)
			if (COTIRE_VERBOSE)
				message (STATUS "${_fileName} update triggered by ${_triggerFile} change.")
			endif()
		else()
			if (COTIRE_VERBOSE)
				message (STATUS "${_fileName} is up-to-date.")
			endif()
			set (${_fileIsUpToDateVar} TRUE PARENT_SCOPE)
		endif()
	else()
		if (COTIRE_VERBOSE)
			message (STATUS "${_fileName} does not exist yet.")
		endif()
	endif()
endfunction()

# Shortest relative path from any of _includeDirs to _headerFile.
macro (cotire_find_closest_relative_path _headerFile _includeDirs _relPathVar)
	set (${_relPathVar} "")
	foreach (_includeDir ${_includeDirs})
		if (IS_DIRECTORY "${_includeDir}")
			file (RELATIVE_PATH _relPath "${_includeDir}" "${_headerFile}")
			if (NOT IS_ABSOLUTE "${_relPath}" AND NOT "${_relPath}" MATCHES "^\\.\\.")
				string (LENGTH "${${_relPathVar}}" _closestLen)
				string (LENGTH "${_relPath}" _relLen)
				if (_closestLen EQUAL 0 OR _relLen LESS _closestLen)
					set (${_relPathVar} "${_relPath}")
				endif()
			endif()
		elseif ("${_includeDir}" STREQUAL "${_headerFile}")
			# if path matches exactly, return short non-empty string
			set (${_relPathVar} "1")
			break()
		endif()
	endforeach()
endmacro()

macro (cotire_check_header_file_location _headerFile _insideIncudeDirs _outsideIncudeDirs _headerIsInside)
	# check header path against ignored and honored include directories
	cotire_find_closest_relative_path("${_headerFile}" "${_insideIncudeDirs}" _insideRelPath)
	if (_insideRelPath)
		# header is inside, but could be become outside if there is a shorter outside match
		cotire_find_closest_relative_path("${_headerFile}" "${_outsideIncudeDirs}" _outsideRelPath)
		if (_outsideRelPath)
			string (LENGTH "${_insideRelPath}" _insideRelPathLen)
			string (LENGTH "${_outsideRelPath}" _outsideRelPathLen)
			if (_outsideRelPathLen LESS _insideRelPathLen)
				set (${_headerIsInside} FALSE)
			else()
				set (${_headerIsInside} TRUE)
			endif()
		else()
			set (${_headerIsInside} TRUE)
		endif()
	else()
		# header is outside
		set (${_headerIsInside} FALSE)
	endif()
endmacro()

macro (cotire_check_ignore_header_file_path _headerFile _headerIsIgnoredVar)
	if (NOT EXISTS "${_headerFile}")
		set (${_headerIsIgnoredVar} TRUE)
	elseif (IS_DIRECTORY "${_headerFile}")
		set (${_headerIsIgnoredVar} TRUE)
	elseif ("${_headerFile}" MATCHES "\\.\\.|[_-]fixed" AND "${_headerFile}" MATCHES "\\.h$")
		# heuristic: ignore C headers with embedded parent directory references or "-fixed" or "_fixed" in path
		# these often stem from using GCC #include_next tricks, which may break the precompiled header compilation
		# with the error message "error: no include path in which to search for header.h"
		set (${_headerIsIgnoredVar} TRUE)
	else()
		set (${_headerIsIgnoredVar} FALSE)
	endif()
endmacro()

macro (cotire_check_ignore_header_file_ext _headerFile _ignoreExtensionsVar _headerIsIgnoredVar)
	# check header file extension
	cotire_get_source_file_extension("${_headerFile}" _headerFileExt)
	set (${_headerIsIgnoredVar} FALSE)
	if (_headerFileExt)
		list (FIND ${_ignoreExtensionsVar} "${_headerFileExt}" _index)
		if (_index GREATER -1)
			set (${_headerIsIgnoredVar} TRUE)
		endif()
	endif()
endmacro()

# Parse one line of compiler include-trace output into header path and depth.
macro (cotire_parse_line _line _headerFileVar _headerDepthVar)
	if (MSVC)
		# cl.exe /showIncludes output looks different depending on the language pack used, e.g.:
		# English: "Note: including file:   C:\directory\file"
		# German: "Hinweis: Einlesen der Datei:   C:\directory\file"
		# We use a very general regular expression, relying on the presence of the : characters
		if (_line MATCHES "( +)([a-zA-Z]:[^:]+)$")
			# Visual Studio compiler output
			string (LENGTH "${CMAKE_MATCH_1}" ${_headerDepthVar})
			get_filename_component(${_headerFileVar} "${CMAKE_MATCH_2}" ABSOLUTE)
		else()
			set (${_headerFileVar} "")
			set (${_headerDepthVar} 0)
		endif()
	else()
		if (_line MATCHES "^(\\.+) (.*)$")
			# GCC like output
			string (LENGTH "${CMAKE_MATCH_1}" ${_headerDepthVar})
			if (IS_ABSOLUTE "${CMAKE_MATCH_2}")
				set (${_headerFileVar} "${CMAKE_MATCH_2}")
			else()
				get_filename_component(${_headerFileVar} "${CMAKE_MATCH_2}" REALPATH)
			endif()
		else()
			set (${_headerFileVar} "")
			set (${_headerDepthVar} 0)
		endif()
	endif()
endmacro()

function (cotire_parse_includes _language _scanOutput _ignoredIncudeDirs _honoredIncudeDirs _ignoredExtensions _selectedIncludesVar _unparsedLinesVar)
	if (WIN32)
		# prevent CMake macro invocation errors due to backslash characters in Windows paths
		string (REPLACE "\\" "/" _scanOutput "${_scanOutput}")
	endif()
	# canonize slashes
	string (REPLACE "//" "/" _scanOutput "${_scanOutput}")
	# prevent semicolon from being interpreted as a line separator
	string (REPLACE ";" "\\;" _scanOutput "${_scanOutput}")
	# then
	# separate lines
	string (REGEX REPLACE "\n" ";" _scanOutput "${_scanOutput}")
	list (LENGTH _scanOutput _len)
	# remove duplicate lines to speed up parsing
	list (REMOVE_DUPLICATES _scanOutput)
	list (LENGTH _scanOutput _uniqueLen)
	if (COTIRE_VERBOSE OR COTIRE_DEBUG)
		message (STATUS "Scanning ${_uniqueLen} unique lines of ${_len} for includes")
		if (_ignoredExtensions)
			message (STATUS "Ignored extensions: ${_ignoredExtensions}")
		endif()
		if (_ignoredIncudeDirs)
			message (STATUS "Ignored paths: ${_ignoredIncudeDirs}")
		endif()
		if (_honoredIncudeDirs)
			message (STATUS "Included paths: ${_honoredIncudeDirs}")
		endif()
	endif()
	set (_sourceFiles ${ARGN})
	set (_selectedIncludes "")
	set (_unparsedLines "")
	# stack keeps track of inside/outside project status of processed header files
	set (_headerIsInsideStack "")
	foreach (_line IN LISTS _scanOutput)
		if (_line)
			cotire_parse_line("${_line}" _headerFile _headerDepth)
			if (_headerFile)
				cotire_check_header_file_location("${_headerFile}" "${_ignoredIncudeDirs}" "${_honoredIncudeDirs}" _headerIsInside)
				if (COTIRE_DEBUG)
					message (STATUS "${_headerDepth}: ${_headerFile} ${_headerIsInside}")
				endif()
				# update stack
				list (LENGTH _headerIsInsideStack _stackLen)
				if (_headerDepth GREATER _stackLen)
					math (EXPR _stackLen "${_stackLen} + 1")
					foreach (_index RANGE ${_stackLen} ${_headerDepth})
						list (APPEND _headerIsInsideStack ${_headerIsInside})
					endforeach()
				else()
					foreach (_index RANGE ${_headerDepth} ${_stackLen})
						list (REMOVE_AT _headerIsInsideStack -1)
					endforeach()
					list (APPEND _headerIsInsideStack ${_headerIsInside})
				endif()
				if (COTIRE_DEBUG)
					message (STATUS "${_headerIsInsideStack}")
				endif()
				# header is a candidate if it is outside project
				if (NOT _headerIsInside)
					# get parent header file's inside/outside status
					if (_headerDepth GREATER 1)
						math (EXPR _index "${_headerDepth} - 2")
						list (GET _headerIsInsideStack ${_index} _parentHeaderIsInside)
					else()
						set (_parentHeaderIsInside TRUE)
					endif()
					# select header file if parent header file is inside project
					# (e.g., a project header file that includes a standard header file)
					if (_parentHeaderIsInside)
						cotire_check_ignore_header_file_path("${_headerFile}" _headerIsIgnored)
						if (NOT _headerIsIgnored)
							cotire_check_ignore_header_file_ext("${_headerFile}" _ignoredExtensions _headerIsIgnored)
							if (NOT _headerIsIgnored)
								list (APPEND _selectedIncludes "${_headerFile}")
							else()
								# fix header's inside status on stack, it is ignored by extension now
								list (REMOVE_AT _headerIsInsideStack -1)
								list (APPEND _headerIsInsideStack TRUE)
							endif()
						endif()
						if (COTIRE_DEBUG)
							message (STATUS "${_headerFile} ${_ignoredExtensions} ${_headerIsIgnored}")
						endif()
					endif()
				endif()
			else()
				if (MSVC)
					# for cl.exe do not keep unparsed lines which solely consist of a source file name
					string (FIND "${_sourceFiles}" "${_line}" _index)
					if (_index LESS 0)
						list (APPEND _unparsedLines "${_line}")
					endif()
				else()
					list (APPEND _unparsedLines "${_line}")
				endif()
			endif()
		endif()
	endforeach()
	list (REMOVE_DUPLICATES _selectedIncludes)
	set (${_selectedIncludesVar} ${_selectedIncludes} PARENT_SCOPE)
	set (${_unparsedLinesVar} ${_unparsedLines} PARENT_SCOPE)
endfunction()

# runs the preprocessor over the given source files and returns (in _includesVar)
# the outside-project headers they pull in, suitable for the prefix header
function (cotire_scan_includes _includesVar)
	set(_options "")
	set(_oneValueArgs COMPILER_ID COMPILER_EXECUTABLE COMPILER_VERSION INCLUDE_SYSTEM_FLAG LANGUAGE UNPARSED_LINES)
	set(_multiValueArgs COMPILE_DEFINITIONS COMPILE_FLAGS INCLUDE_DIRECTORIES SYSTEM_INCLUDE_DIRECTORIES IGNORE_PATH INCLUDE_PATH IGNORE_EXTENSIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	set (_sourceFiles ${_option_UNPARSED_ARGUMENTS})
	if (NOT _option_LANGUAGE)
		set (_option_LANGUAGE "CXX")
	endif()
	if (NOT _option_COMPILER_ID)
		set (_option_COMPILER_ID "${CMAKE_${_option_LANGUAGE}_ID}")
	endif()
	# NOTE(review): this initial value of _cmd is immediately overwritten by
	# cotire_init_compile_cmd below; kept for fidelity with the original
	set (_cmd "${_option_COMPILER_EXECUTABLE}" ${_option_COMPILER_ARG1})
	cotire_init_compile_cmd(_cmd "${_option_LANGUAGE}" "${_option_COMPILER_EXECUTABLE}" "${_option_COMPILER_ARG1}")
	cotire_add_definitions_to_cmd(_cmd "${_option_LANGUAGE}" ${_option_COMPILE_DEFINITIONS})
	cotire_add_compile_flags_to_cmd(_cmd ${_option_COMPILE_FLAGS})
	cotire_add_includes_to_cmd(_cmd "${_option_LANGUAGE}" "${_option_INCLUDE_SYSTEM_FLAG}" _option_INCLUDE_DIRECTORIES _option_SYSTEM_INCLUDE_DIRECTORIES)
	cotire_add_frameworks_to_cmd(_cmd "${_option_LANGUAGE}" ${_option_INCLUDE_DIRECTORIES})
	cotire_add_makedep_flags("${_option_LANGUAGE}" "${_option_COMPILER_ID}" "${_option_COMPILER_VERSION}" _cmd)
	# only consider existing source files for scanning
	set (_existingSourceFiles "")
	foreach (_sourceFile ${_sourceFiles})
		if (EXISTS "${_sourceFile}")
			list (APPEND _existingSourceFiles "${_sourceFile}")
		endif()
	endforeach()
	if (NOT _existingSourceFiles)
		set (${_includesVar} "" PARENT_SCOPE)
		return()
	endif()
	list (APPEND _cmd ${_existingSourceFiles})
	if (COTIRE_VERBOSE)
		message (STATUS "execute_process: ${_cmd}")
	endif()
	if (_option_COMPILER_ID MATCHES "MSVC")
		if (COTIRE_DEBUG)
			message (STATUS "clearing VS_UNICODE_OUTPUT")
		endif()
		# cl.exe messes with the output streams unless the environment variable VS_UNICODE_OUTPUT is cleared
		unset (ENV{VS_UNICODE_OUTPUT})
	endif()
	# the include list is emitted on stderr (GCC -H) or mixed into the output (cl.exe),
	# hence ERROR_VARIABLE captures the scan and stdout is discarded
	execute_process(
		COMMAND ${_cmd}
		WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
		RESULT_VARIABLE _result
		OUTPUT_QUIET
		ERROR_VARIABLE _output)
	if (_result)
		message (STATUS "Result ${_result} scanning includes of ${_existingSourceFiles}.")
	endif()
	cotire_parse_includes(
		"${_option_LANGUAGE}" "${_output}"
		"${_option_IGNORE_PATH}" "${_option_INCLUDE_PATH}"
		"${_option_IGNORE_EXTENSIONS}"
		_includes _unparsedLines
		${_sourceFiles})
	set (${_includesVar} ${_includes} PARENT_SCOPE)
	if (_option_UNPARSED_LINES)
		set (${_option_UNPARSED_LINES} ${_unparsedLines} PARENT_SCOPE)
	endif()
endfunction()

# appends a "#undef <macro>" line to the contents variable for each macro in ARGN
macro (cotire_append_undefs _contentsVar)
	set (_undefs ${ARGN})
	if (_undefs)
		list (REMOVE_DUPLICATES _undefs)
		foreach (_definition ${_undefs})
			list (APPEND ${_contentsVar} "#undef ${_definition}")
		endforeach()
	endif()
endmacro()

# produces a comment string in the syntax of the given language (CMake or C/C++)
macro (cotire_comment_str _language _commentText _commentVar)
	if
("${_language}" STREQUAL "CMAKE") # continuation of the if() begun on the previous line
		set (${_commentVar} "# ${_commentText}")
	else()
		set (${_commentVar} "/* ${_commentText} */")
	endif()
endmacro()

# writes _contents to _file prefixed with a generated-file banner; unless _force
# is set, the file is rewritten only when the contents actually changed, so that
# its timestamp is not touched needlessly
function (cotire_write_file _language _file _contents _force)
	get_filename_component(_moduleName "${COTIRE_CMAKE_MODULE_FILE}" NAME)
	cotire_comment_str("${_language}" "${_moduleName} ${COTIRE_CMAKE_MODULE_VERSION} generated file" _header1)
	cotire_comment_str("${_language}" "${_file}" _header2)
	set (_contents "${_header1}\n${_header2}\n${_contents}")
	if (COTIRE_DEBUG)
		message (STATUS "${_contents}")
	endif()
	if (_force OR NOT EXISTS "${_file}")
		file (WRITE "${_file}" "${_contents}")
	else()
		file (READ "${_file}" _oldContents)
		if (NOT "${_oldContents}" STREQUAL "${_contents}")
			file (WRITE "${_file}" "${_contents}")
		else()
			if (COTIRE_DEBUG)
				message (STATUS "${_file} unchanged")
			endif()
		endif()
	endif()
endfunction()

# generates a unity source file that #includes the given source files in order,
# emitting per-source compile definitions and the requested pre/post #undef lines
function (cotire_generate_unity_source _unityFile)
	set(_options "")
	set(_oneValueArgs LANGUAGE)
	set(_multiValueArgs DEPENDS SOURCE_LOCATIONS SOURCES_COMPILE_DEFINITIONS PRE_UNDEFS SOURCES_PRE_UNDEFS POST_UNDEFS SOURCES_POST_UNDEFS PROLOGUE EPILOGUE)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if (_option_DEPENDS)
		cotire_check_file_up_to_date(_unityFileIsUpToDate "${_unityFile}" ${_option_DEPENDS})
		if (_unityFileIsUpToDate)
			return()
		endif()
	endif()
	set (_sourceFiles ${_option_UNPARSED_ARGUMENTS})
	if (NOT _option_PRE_UNDEFS)
		set (_option_PRE_UNDEFS "")
	endif()
	if (NOT _option_SOURCES_PRE_UNDEFS)
		set (_option_SOURCES_PRE_UNDEFS "")
	endif()
	if (NOT _option_POST_UNDEFS)
		set (_option_POST_UNDEFS "")
	endif()
	if (NOT _option_SOURCES_POST_UNDEFS)
		set (_option_SOURCES_POST_UNDEFS "")
	endif()
	set (_contents "")
	if (_option_PROLOGUE)
		list (APPEND _contents ${_option_PROLOGUE})
	endif()
	if (_option_LANGUAGE AND _sourceFiles)
		# guard the whole file so a C unity source is inert under C++ and vice versa
		if ("${_option_LANGUAGE}" STREQUAL "CXX")
			list (APPEND _contents "#ifdef __cplusplus")
		elseif ("${_option_LANGUAGE}" STREQUAL "C")
			list (APPEND _contents "#ifndef __cplusplus")
		endif()
	endif()
	set (_compileUndefinitions "")
	set (_index 0)
	foreach (_sourceFile ${_sourceFiles})
		cotire_get_source_compile_definitions(
			"${_option_CONFIGURATION}" "${_option_LANGUAGE}" "${_sourceFile}" _compileDefinitions
			${_option_SOURCES_COMPILE_DEFINITIONS})
		cotire_get_source_undefs("${_sourceFile}" COTIRE_UNITY_SOURCE_PRE_UNDEFS _sourcePreUndefs ${_option_SOURCES_PRE_UNDEFS})
		cotire_get_source_undefs("${_sourceFile}" COTIRE_UNITY_SOURCE_POST_UNDEFS _sourcePostUndefs ${_option_SOURCES_POST_UNDEFS})
		if (_option_PRE_UNDEFS)
			list (APPEND _compileUndefinitions ${_option_PRE_UNDEFS})
		endif()
		if (_sourcePreUndefs)
			list (APPEND _compileUndefinitions ${_sourcePreUndefs})
		endif()
		if (_compileUndefinitions)
			cotire_append_undefs(_contents ${_compileUndefinitions})
			set (_compileUndefinitions "")
		endif()
		if (_sourcePostUndefs)
			list (APPEND _compileUndefinitions ${_sourcePostUndefs})
		endif()
		if (_option_POST_UNDEFS)
			list (APPEND _compileUndefinitions ${_option_POST_UNDEFS})
		endif()
		foreach (_definition ${_compileDefinitions})
			# emit NAME=VALUE definitions as "#define NAME VALUE"; queue the matching #undef
			if (_definition MATCHES "^([a-zA-Z0-9_]+)=(.+)$")
				list (APPEND _contents "#define ${CMAKE_MATCH_1} ${CMAKE_MATCH_2}")
				list (INSERT _compileUndefinitions 0 "${CMAKE_MATCH_1}")
			else()
				list (APPEND _contents "#define ${_definition}")
				list (INSERT _compileUndefinitions 0 "${_definition}")
			endif()
		endforeach()
		if (_option_SOURCE_LOCATIONS)
			# use explicitly provided source file location
			list (GET _option_SOURCE_LOCATIONS ${_index} _sourceFileLocation)
		else()
			# use absolute path as source file location
			get_filename_component(_sourceFileLocation "${_sourceFile}" ABSOLUTE)
		endif()
		if (WIN32)
			file (TO_NATIVE_PATH "${_sourceFileLocation}" _sourceFileLocation)
		endif()
		list (APPEND _contents "#include \"${_sourceFileLocation}\"")
		math (EXPR _index "${_index} + 1")
	endforeach()
	if (_compileUndefinitions)
		cotire_append_undefs(_contents ${_compileUndefinitions})
		set (_compileUndefinitions "")
	endif()
	if (_option_LANGUAGE AND _sourceFiles)
		list (APPEND _contents "#endif")
	endif()
	if (_option_EPILOGUE)
		list (APPEND _contents ${_option_EPILOGUE})
	endif()
	list (APPEND _contents "")
	string (REPLACE ";" "\n" _contents "${_contents}")
	if (COTIRE_VERBOSE)
		message ("${_contents}")
	endif()
	cotire_write_file("${_option_LANGUAGE}" "${_unityFile}" "${_contents}" TRUE)
endfunction()

# generates the prefix header for the given sources by scanning them for
# outside-project includes; lines the scanner could not parse go to <prefix>.log
function (cotire_generate_prefix_header _prefixFile)
	set(_options "")
	set(_oneValueArgs LANGUAGE COMPILER_EXECUTABLE COMPILER_ID COMPILER_VERSION INCLUDE_SYSTEM_FLAG)
	set(_multiValueArgs DEPENDS COMPILE_DEFINITIONS COMPILE_FLAGS INCLUDE_DIRECTORIES SYSTEM_INCLUDE_DIRECTORIES IGNORE_PATH INCLUDE_PATH IGNORE_EXTENSIONS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if (_option_DEPENDS)
		cotire_check_file_up_to_date(_prefixFileIsUpToDate "${_prefixFile}" ${_option_DEPENDS})
		if (_prefixFileIsUpToDate)
			set (_unparsedLinesFile "${_prefixFile}.log")
			file (WRITE "${_unparsedLinesFile}" "")
			return()
		endif()
	endif()
	set (_prologue "")
	set (_epilogue "")
	if (_option_COMPILER_ID MATCHES "Clang")
		set (_prologue "#pragma clang system_header")
	elseif (_option_COMPILER_ID MATCHES "GNU")
		set (_prologue "#pragma GCC system_header")
	elseif (_option_COMPILER_ID MATCHES "MSVC")
		set (_prologue "#pragma warning(push, 0)")
		set (_epilogue "#pragma warning(pop)")
	elseif (_option_COMPILER_ID MATCHES "Intel")
		# Intel compiler requires hdrstop pragma to stop generating PCH file
		set (_epilogue "#pragma hdrstop")
	endif()
	set (_sourceFiles ${_option_UNPARSED_ARGUMENTS})
	cotire_scan_includes(_selectedHeaders ${_sourceFiles}
		LANGUAGE "${_option_LANGUAGE}"
		COMPILER_EXECUTABLE "${_option_COMPILER_EXECUTABLE}"
		COMPILER_ID "${_option_COMPILER_ID}"
		COMPILER_VERSION "${_option_COMPILER_VERSION}"
		COMPILE_DEFINITIONS ${_option_COMPILE_DEFINITIONS}
		COMPILE_FLAGS ${_option_COMPILE_FLAGS}
		INCLUDE_DIRECTORIES ${_option_INCLUDE_DIRECTORIES}
		INCLUDE_SYSTEM_FLAG ${_option_INCLUDE_SYSTEM_FLAG}
		SYSTEM_INCLUDE_DIRECTORIES ${_option_SYSTEM_INCLUDE_DIRECTORIES}
		IGNORE_PATH ${_option_IGNORE_PATH}
		INCLUDE_PATH ${_option_INCLUDE_PATH}
		IGNORE_EXTENSIONS ${_option_IGNORE_EXTENSIONS}
		UNPARSED_LINES _unparsedLines)
	cotire_generate_unity_source("${_prefixFile}"
		PROLOGUE ${_prologue} EPILOGUE ${_epilogue} LANGUAGE "${_option_LANGUAGE}" ${_selectedHeaders})
	set (_unparsedLinesFile "${_prefixFile}.log")
	if (_unparsedLines)
		if (COTIRE_VERBOSE OR NOT _selectedHeaders)
			list (LENGTH _unparsedLines _skippedLineCount)
			file (RELATIVE_PATH _unparsedLinesFileRelPath "${CMAKE_BINARY_DIR}" "${_unparsedLinesFile}")
			message (STATUS "${_skippedLineCount} line(s) skipped, see ${_unparsedLinesFileRelPath}")
		endif()
		string (REPLACE ";" "\n" _unparsedLines "${_unparsedLines}")
	endif()
	file (WRITE "${_unparsedLinesFile}" "${_unparsedLines}")
endfunction()

# appends (or returns as a string, when the flags variable is empty) the
# compiler-specific flags that make the compiler preprocess the sources and
# list every header it includes, for the include scan
function (cotire_add_makedep_flags _language _compilerID _compilerVersion _flagsVar)
	set (_flags ${${_flagsVar}})
	if (_compilerID MATCHES "MSVC")
		# cl.exe options used
		# /nologo suppresses display of sign-on banner
		# /TC treat all files named on the command line as C source files
		# /TP treat all files named on the command line as C++ source files
		# /EP preprocess to stdout without #line directives
		# /showIncludes list include files
		set (_sourceFileTypeC "/TC")
		set (_sourceFileTypeCXX "/TP")
		if (_flags)
			# append to list
			list (APPEND _flags /nologo "${_sourceFileType${_language}}" /EP /showIncludes)
		else()
			# return as a flag string
			set (_flags "${_sourceFileType${_language}} /EP /showIncludes")
		endif()
	elseif (_compilerID MATCHES "GNU")
		# GCC options used
		# -H print the name of each header file used
		# -E invoke preprocessor
		# -fdirectives-only do not expand macros, requires GCC >= 4.3
		if (_flags)
			# append to list
			list (APPEND _flags -H -E)
			if (NOT "${_compilerVersion}" VERSION_LESS "4.3.0")
				list (APPEND _flags "-fdirectives-only")
			endif()
		else()
			# return as a flag string
			set (_flags "-H -E")
			if (NOT "${_compilerVersion}" VERSION_LESS "4.3.0")
				set (_flags "${_flags} 
-fdirectives-only")
			endif()
		endif()
	elseif (_compilerID MATCHES "Clang")
		# Clang options used
		# -H print the name of each header file used
		# -E invoke preprocessor
		if (_flags)
			# append to list
			list (APPEND _flags -H -E)
		else()
			# return as a flag string
			set (_flags "-H -E")
		endif()
	elseif (_compilerID MATCHES "Intel")
		if (WIN32)
			# Windows Intel options used
			# /nologo do not display compiler version information
			# /QH display the include file order
			# /EP preprocess to stdout, omitting #line directives
			# /TC process all source or unrecognized file types as C source files
			# /TP process all source or unrecognized file types as C++ source files
			set (_sourceFileTypeC "/TC")
			set (_sourceFileTypeCXX "/TP")
			if (_flags)
				# append to list
				list (APPEND _flags /nologo "${_sourceFileType${_language}}" /EP /QH)
			else()
				# return as a flag string
				set (_flags "${_sourceFileType${_language}} /EP /QH")
			endif()
		else()
			# Linux / Mac OS X Intel options used
			# -H print the name of each header file used
			# -EP preprocess to stdout, omitting #line directives
			# -Kc++ process all source or unrecognized file types as C++ source files
			if (_flags)
				# append to list
				if ("${_language}" STREQUAL "CXX")
					list (APPEND _flags -Kc++)
				endif()
				list (APPEND _flags -H -EP)
			else()
				# return as a flag string
				if ("${_language}" STREQUAL "CXX")
					set (_flags "-Kc++ ")
				endif()
				set (_flags "${_flags}-H -EP")
			endif()
		endif()
	else()
		message (FATAL_ERROR "cotire: unsupported ${_language} compiler ${_compilerID} version ${_compilerVersion}.")
	endif()
	set (${_flagsVar} ${_flags} PARENT_SCOPE)
endfunction()

# appends (or returns as a string, when the flags variable is empty) the
# compiler-specific flags that compile the prefix header into a precompiled
# header binary (_pchFile)
function (cotire_add_pch_compilation_flags _language _compilerID _compilerVersion _prefixFile _pchFile _hostFile _flagsVar)
	set (_flags ${${_flagsVar}})
	if (_compilerID MATCHES "MSVC")
		file (TO_NATIVE_PATH "${_prefixFile}" _prefixFileNative)
		file (TO_NATIVE_PATH "${_pchFile}" _pchFileNative)
		file (TO_NATIVE_PATH "${_hostFile}" _hostFileNative)
		# cl.exe options used
		# /Yc creates a precompiled header file
		# /Fp specifies precompiled header binary file name
		# /FI forces inclusion of file
		# /TC treat all files named on the command line as C source files
		# /TP treat all files named on the command line as C++ source files
		# /Zs syntax check only
		set (_sourceFileTypeC "/TC")
		set (_sourceFileTypeCXX "/TP")
		if (_flags)
			# append to list
			list (APPEND _flags /nologo "${_sourceFileType${_language}}"
				"/Yc${_prefixFileNative}" "/Fp${_pchFileNative}" "/FI${_prefixFileNative}" /Zs "${_hostFileNative}")
		else()
			# return as a flag string
			set (_flags "/Yc\"${_prefixFileNative}\" /Fp\"${_pchFileNative}\" /FI\"${_prefixFileNative}\"")
		endif()
	elseif (_compilerID MATCHES "GNU|Clang")
		# GCC / Clang options used
		# -x specify the source language
		# -c compile but do not link
		# -o place output in file
		# note that we cannot use -w to suppress all warnings upon pre-compiling, because turning off a warning may
		# alter compile flags as a side effect (e.g., -Wwrite-string implies -fconst-strings)
		set (_xLanguage_C "c-header")
		set (_xLanguage_CXX "c++-header")
		if (_flags)
			# append to list
			list (APPEND _flags "-x" "${_xLanguage_${_language}}" "-c" "${_prefixFile}" -o "${_pchFile}")
		else()
			# return as a flag string
			set (_flags "-x ${_xLanguage_${_language}} -c \"${_prefixFile}\" -o \"${_pchFile}\"")
		endif()
	elseif (_compilerID MATCHES "Intel")
		if (WIN32)
			file (TO_NATIVE_PATH "${_prefixFile}" _prefixFileNative)
			file (TO_NATIVE_PATH "${_pchFile}" _pchFileNative)
			file (TO_NATIVE_PATH "${_hostFile}" _hostFileNative)
			# Windows Intel options used
			# /nologo do not display compiler version information
			# /Yc create a precompiled header (PCH) file
			# /Fp specify a path or file name for precompiled header files
			# /FI tells the preprocessor to include a specified file name as the header file
			# /TC process all source or unrecognized file types as C source files
			# /TP process all source or unrecognized file types as C++ source files
			# /Zs syntax check only
			# /Wpch-messages enable diagnostics related to pre-compiled headers (requires Intel XE 2013 Update 2)
			set (_sourceFileTypeC "/TC")
			set (_sourceFileTypeCXX "/TP")
			if (_flags)
				# append to list
				list (APPEND _flags /nologo "${_sourceFileType${_language}}"
					"/Yc" "/Fp${_pchFileNative}" "/FI${_prefixFileNative}" /Zs "${_hostFileNative}")
				if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
					list (APPEND _flags "/Wpch-messages")
				endif()
			else()
				# return as a flag string
				set (_flags "/Yc /Fp\"${_pchFileNative}\" /FI\"${_prefixFileNative}\"")
				if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
					set (_flags "${_flags} /Wpch-messages")
				endif()
			endif()
		else()
			# Linux / Mac OS X Intel options used
			# -pch-dir location for precompiled header files
			# -pch-create name of the precompiled header (PCH) to create
			# -Kc++ process all source or unrecognized file types as C++ source files
			# -fsyntax-only check only for correct syntax
			# -Wpch-messages enable diagnostics related to pre-compiled headers (requires Intel XE 2013 Update 2)
			get_filename_component(_pchDir "${_pchFile}" PATH)
			get_filename_component(_pchName "${_pchFile}" NAME)
			set (_xLanguage_C "c-header")
			set (_xLanguage_CXX "c++-header")
			if (_flags)
				# append to list
				if ("${_language}" STREQUAL "CXX")
					list (APPEND _flags -Kc++)
				endif()
				list (APPEND _flags "-include" "${_prefixFile}" "-pch-dir" "${_pchDir}" "-pch-create" "${_pchName}" "-fsyntax-only" "${_hostFile}")
				if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
					list (APPEND _flags "-Wpch-messages")
				endif()
			else()
				# return as a flag string
				set (_flags "-include \"${_prefixFile}\" -pch-dir \"${_pchDir}\" -pch-create \"${_pchName}\"")
				if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
					set (_flags "${_flags} -Wpch-messages")
				endif()
			endif()
		endif()
	else()
		message (FATAL_ERROR "cotire: unsupported ${_language} compiler ${_compilerID} version ${_compilerVersion}.")
	endif()
	set (${_flagsVar} ${_flags} PARENT_SCOPE)
endfunction()

# appends (or returns as a string, when the flags variable is empty) the
# compiler-specific flags that force-include the prefix header into every
# compilation unit and, when _pchFile is given, use the precompiled header
function (cotire_add_prefix_pch_inclusion_flags _language _compilerID _compilerVersion _prefixFile _pchFile _flagsVar)
	set (_flags ${${_flagsVar}})
	if (_compilerID MATCHES "MSVC")
		file (TO_NATIVE_PATH "${_prefixFile}" _prefixFileNative)
		# cl.exe options used
		# /Yu uses a precompiled header file during build
		# /Fp specifies precompiled header binary file name
		# /FI forces inclusion of file
		if (_pchFile)
			file (TO_NATIVE_PATH "${_pchFile}" _pchFileNative)
			if (_flags)
				# append to list
				list (APPEND _flags "/Yu${_prefixFileNative}" "/Fp${_pchFileNative}" "/FI${_prefixFileNative}")
			else()
				# return as a flag string
				set (_flags "/Yu\"${_prefixFileNative}\" /Fp\"${_pchFileNative}\" /FI\"${_prefixFileNative}\"")
			endif()
		else()
			# no precompiled header, force inclusion of prefix header
			if (_flags)
				# append to list
				list (APPEND _flags "/FI${_prefixFileNative}")
			else()
				# return as a flag string
				set (_flags "/FI\"${_prefixFileNative}\"")
			endif()
		endif()
	elseif (_compilerID MATCHES "GNU")
		# GCC options used
		# -include process include file as the first line of the primary source file
		# -Winvalid-pch warns if precompiled header is found but cannot be used
		# note: ccache requires the -include flag to be used in order to process precompiled header correctly
		if (_flags)
			# append to list
			list (APPEND _flags "-Winvalid-pch" "-include" "${_prefixFile}")
		else()
			# return as a flag string
			set (_flags "-Winvalid-pch -include \"${_prefixFile}\"")
		endif()
	elseif (_compilerID MATCHES "Clang")
		# Clang options used
		# -include process include file as the first line of the primary source file
		# -include-pch include precompiled header file
		# -Qunused-arguments don't emit warning for unused driver arguments
		# note: ccache requires the -include flag to be used in order to process precompiled header correctly
		if (_flags)
			# append to list
			list (APPEND _flags "-Qunused-arguments" "-include" "${_prefixFile}")
		else()
			# return as a flag string
			set (_flags "-Qunused-arguments -include \"${_prefixFile}\"")
		endif()
	elseif (_compilerID MATCHES "Intel")
		if (WIN32)
			file (TO_NATIVE_PATH "${_prefixFile}" _prefixFileNative)
			# Windows Intel options used
			# /Yu use a precompiled header (PCH) file
			# /Fp specify a path or file name for precompiled header files
			# /FI tells the preprocessor to include a specified file name as the header file
			# /Wpch-messages enable diagnostics related to pre-compiled headers (requires Intel XE 2013 Update 2)
			if (_pchFile)
				file (TO_NATIVE_PATH "${_pchFile}" _pchFileNative)
				if (_flags)
					# append to list
					list (APPEND _flags "/Yu" "/Fp${_pchFileNative}" "/FI${_prefixFileNative}")
					if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
						list (APPEND _flags "/Wpch-messages")
					endif()
				else()
					# return as a flag string
					set (_flags "/Yu /Fp\"${_pchFileNative}\" /FI\"${_prefixFileNative}\"")
					if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
						set (_flags "${_flags} /Wpch-messages")
					endif()
				endif()
			else()
				# no precompiled header, force inclusion of prefix header
				if (_flags)
					# append to list
					list (APPEND _flags "/FI${_prefixFileNative}")
				else()
					# return as a flag string
					set (_flags "/FI\"${_prefixFileNative}\"")
				endif()
			endif()
		else()
			# Linux / Mac OS X Intel options used
			# -pch-dir location for precompiled header files
			# -pch-use name of the precompiled header (PCH) to use
			# -include process include file as the first line of the primary source file
			# -Wpch-messages enable diagnostics related to pre-compiled headers (requires Intel XE 2013 Update 2)
			if (_pchFile)
				get_filename_component(_pchDir "${_pchFile}" PATH)
				get_filename_component(_pchName "${_pchFile}" NAME)
				if (_flags)
					# append to list
					list (APPEND _flags "-include" "${_prefixFile}" "-pch-dir" "${_pchDir}" "-pch-use" "${_pchName}")
					if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
						list (APPEND _flags "-Wpch-messages")
					endif()
				else()
					# return as a flag string
					set (_flags "-include \"${_prefixFile}\" -pch-dir \"${_pchDir}\" -pch-use \"${_pchName}\"")
					if (NOT "${_compilerVersion}" VERSION_LESS "13.1.0")
						set (_flags "${_flags} -Wpch-messages")
					endif()
				endif()
			else()
				# no precompiled header, force inclusion of prefix header
				if (_flags)
					# append to list
					list (APPEND _flags "-include" "${_prefixFile}")
				else()
					# return as a flag string
					set (_flags "-include \"${_prefixFile}\"")
				endif()
			endif()
		endif()
	else()
		message (FATAL_ERROR "cotire: unsupported ${_language} compiler ${_compilerID} version ${_compilerVersion}.")
	endif()
	set (${_flagsVar} ${_flags} PARENT_SCOPE)
endfunction()

# invokes the compiler to precompile _prefixFile into the PCH binary _pchFile,
# using _hostFile as the driving translation unit where the compiler needs one
function (cotire_precompile_prefix_header _prefixFile _pchFile _hostFile)
	set(_options "")
	set(_oneValueArgs COMPILER_EXECUTABLE COMPILER_ID COMPILER_VERSION INCLUDE_SYSTEM_FLAG LANGUAGE)
	set(_multiValueArgs COMPILE_DEFINITIONS COMPILE_FLAGS INCLUDE_DIRECTORIES SYSTEM_INCLUDE_DIRECTORIES SYS)
	cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN})
	if (NOT _option_LANGUAGE)
		set (_option_LANGUAGE "CXX")
	endif()
	if (NOT _option_COMPILER_ID)
		set (_option_COMPILER_ID "${CMAKE_${_option_LANGUAGE}_ID}")
	endif()
	cotire_init_compile_cmd(_cmd "${_option_LANGUAGE}" "${_option_COMPILER_EXECUTABLE}" "${_option_COMPILER_ARG1}")
	cotire_add_definitions_to_cmd(_cmd "${_option_LANGUAGE}" ${_option_COMPILE_DEFINITIONS})
	cotire_add_compile_flags_to_cmd(_cmd ${_option_COMPILE_FLAGS})
	cotire_add_includes_to_cmd(_cmd "${_option_LANGUAGE}" "${_option_INCLUDE_SYSTEM_FLAG}" _option_INCLUDE_DIRECTORIES _option_SYSTEM_INCLUDE_DIRECTORIES)
	cotire_add_frameworks_to_cmd(_cmd "${_option_LANGUAGE}" ${_option_INCLUDE_DIRECTORIES})
	cotire_add_pch_compilation_flags(
		"${_option_LANGUAGE}" "${_option_COMPILER_ID}" "${_option_COMPILER_VERSION}"
		"${_prefixFile}" "${_pchFile}" "${_hostFile}" _cmd)
	if (COTIRE_VERBOSE)
		message (STATUS "execute_process: ${_cmd}")
	endif()
	if (_option_COMPILER_ID MATCHES "MSVC")
		if (COTIRE_DEBUG)
			message (STATUS "clearing VS_UNICODE_OUTPUT")
		endif()
		# cl.exe messes with the output streams unless the environment variable VS_UNICODE_OUTPUT is cleared
		unset (ENV{VS_UNICODE_OUTPUT})
	endif()
	execute_process(
		COMMAND ${_cmd}
		WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
		RESULT_VARIABLE _result)
# Tail of the enclosing precompile function (its definition starts before this chunk):
# abort the build script with a fatal error if the precompile step returned non-zero.
if (_result)
	message (FATAL_ERROR "cotire: error ${_result} precompiling ${_prefixFile}.")
endif()
endfunction()

# Checks whether precompiled headers can be used for the given language on the
# current compiler. Sets ${_msgVar} to "" when PCH is supported, or to an
# explanatory message when it is not (unknown compiler, too-old GCC/Intel,
# ccache without suitable CCACHE_SLOPPINESS, or Darwin multi-arch builds).
function (cotire_check_precompiled_header_support _language _targetSourceDir _target _msgVar)
	set (_unsupportedCompiler
		"Precompiled headers not supported for ${_language} compiler ${CMAKE_${_language}_COMPILER_ID}")
	if (CMAKE_${_language}_COMPILER_ID MATCHES "MSVC")
		# supported since Visual Studio C++ 6.0
		# and CMake does not support an earlier version
		set (${_msgVar} "" PARENT_SCOPE)
	elseif (CMAKE_${_language}_COMPILER_ID MATCHES "GNU")
		# GCC PCH support requires version >= 3.4
		cotire_determine_compiler_version("${_language}" COTIRE_${_language}_COMPILER)
		if ("${COTIRE_${_language}_COMPILER_VERSION}" MATCHES ".+" AND
			"${COTIRE_${_language}_COMPILER_VERSION}" VERSION_LESS "3.4.0")
			set (${_msgVar} "${_unsupportedCompiler} version ${COTIRE_${_language}_COMPILER_VERSION}." PARENT_SCOPE)
		else()
			set (${_msgVar} "" PARENT_SCOPE)
		endif()
	elseif (CMAKE_${_language}_COMPILER_ID MATCHES "Clang")
		# all Clang versions have PCH support
		set (${_msgVar} "" PARENT_SCOPE)
	elseif (CMAKE_${_language}_COMPILER_ID MATCHES "Intel")
		# Intel PCH support requires version >= 8.0.0
		cotire_determine_compiler_version("${_language}" COTIRE_${_language}_COMPILER)
		if ("${COTIRE_${_language}_COMPILER_VERSION}" MATCHES ".+" AND
			"${COTIRE_${_language}_COMPILER_VERSION}" VERSION_LESS "8.0.0")
			set (${_msgVar} "${_unsupportedCompiler} version ${COTIRE_${_language}_COMPILER_VERSION}." PARENT_SCOPE)
		else()
			set (${_msgVar} "" PARENT_SCOPE)
		endif()
	else()
		set (${_msgVar} "${_unsupportedCompiler}." PARENT_SCOPE)
	endif()
	# ccache invalidates PCHs unless told to ignore time macros / pch defines
	if (CMAKE_${_language}_COMPILER MATCHES "ccache")
		if (NOT "$ENV{CCACHE_SLOPPINESS}" MATCHES "time_macros|pch_defines")
			set (${_msgVar}
				"ccache requires the environment variable CCACHE_SLOPPINESS to be set to \"pch_defines,time_macros\"."
				PARENT_SCOPE)
		endif()
	endif()
	if (APPLE)
		# PCH compilation not supported by GCC / Clang for multi-architecture builds (e.g., i386, x86_64)
		cotire_get_configuration_types(_configs)
		foreach (_config ${_configs})
			set (_targetFlags "")
			cotire_get_target_compile_flags("${_config}" "${_language}" "${_targetSourceDir}" "${_target}" _targetFlags)
			cotire_filter_compile_flags("${_language}" "arch" _architectures _ignore ${_targetFlags})
			list (LENGTH _architectures _numberOfArchitectures)
			if (_numberOfArchitectures GREATER 1)
				string (REPLACE ";" ", " _architectureStr "${_architectures}")
				set (${_msgVar}
					"Precompiled headers not supported on Darwin for multi-architecture builds (${_architectureStr})."
					PARENT_SCOPE)
				break()
			endif()
		endforeach()
	endif()
endfunction()

# Returns (in ${_cotireDir}) the absolute path of the per-directory cotire
# intermediate directory, including the per-configuration subdir if any.
macro (cotire_get_intermediate_dir _cotireDir)
	get_filename_component(${_cotireDir} "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/${COTIRE_INTDIR}" ABSOLUTE)
endmacro()

# Defines, in the caller's scope, the file extensions used for generated
# unity sources, prefix headers and prefix sources, per language (C and CXX).
macro (cotire_setup_file_extension_variables)
	set (_unityFileExt_C ".c")
	set (_unityFileExt_CXX ".cxx")
	set (_prefixFileExt_C ".h")
	set (_prefixFileExt_CXX ".hxx")
	set (_prefixSourceFileExt_C ".c")
	set (_prefixSourceFileExt_CXX ".cxx")
endmacro()

# Computes the path of the single (joint) unity source file for the given
# target and language; sets ${_unityFileVar} to "" for unsupported languages.
function (cotire_make_single_unity_source_file_path _language _target _unityFileVar)
	cotire_setup_file_extension_variables()
	if (NOT DEFINED _unityFileExt_${_language})
		set (${_unityFileVar} "" PARENT_SCOPE)
		return()
	endif()
	set (_unityFileBaseName "${_target}_${_language}${COTIRE_UNITY_SOURCE_FILENAME_SUFFIX}")
	set (_unityFileName "${_unityFileBaseName}${_unityFileExt_${_language}}")
	cotire_get_intermediate_dir(_baseDir)
	set (_unityFile "${_baseDir}/${_unityFileName}")
	set (${_unityFileVar} "${_unityFile}" PARENT_SCOPE)
	if (COTIRE_DEBUG)
		message(STATUS "${_unityFile}")
	endif()
endfunction()

# Splits the source list (ARGN) into unity source segments of at most
# _maxIncludes files each (a new segment is also started at sources carrying
# COTIRE_START_NEW_UNITY_SOURCE) and returns the segment file paths in
# ${_unityFilesVar}. With a single segment, the joint unity file path is used.
function (cotire_make_unity_source_file_paths _language _target _maxIncludes _unityFilesVar)
	cotire_setup_file_extension_variables()
	if (NOT DEFINED _unityFileExt_${_language})
		# NOTE(review): this sets ${_unityFileVar}, but the output parameter of this
		# function is _unityFilesVar — looks like a latent typo; verify against callers.
		set (${_unityFileVar} "" PARENT_SCOPE)
		return()
	endif()
	set (_unityFileBaseName "${_target}_${_language}${COTIRE_UNITY_SOURCE_FILENAME_SUFFIX}")
	cotire_get_intermediate_dir(_baseDir)
	set (_startIndex 0)
	set (_index 0)
	set (_unityFiles "")
	set (_sourceFiles ${ARGN})
	foreach (_sourceFile ${_sourceFiles})
		get_source_file_property(_startNew "${_sourceFile}" COTIRE_START_NEW_UNITY_SOURCE)
		math (EXPR _unityFileCount "${_index} - ${_startIndex}")
		if (_startNew OR (_maxIncludes GREATER 0 AND NOT _unityFileCount LESS _maxIncludes))
			if (_index GREATER 0)
				# start new unity file segment
				math (EXPR _endIndex "${_index} - 1")
				set (_unityFileName "${_unityFileBaseName}_${_startIndex}_${_endIndex}${_unityFileExt_${_language}}")
				list (APPEND _unityFiles "${_baseDir}/${_unityFileName}")
			endif()
			set (_startIndex ${_index})
		endif()
		math (EXPR _index "${_index} + 1")
	endforeach()
	list (LENGTH _sourceFiles _numberOfSources)
	if (_startIndex EQUAL 0)
		# there is only a single unity file
		cotire_make_single_unity_source_file_path(${_language} ${_target} _unityFiles)
	elseif (_startIndex LESS _numberOfSources)
		# end with final unity file segment
		math (EXPR _endIndex "${_index} - 1")
		set (_unityFileName "${_unityFileBaseName}_${_startIndex}_${_endIndex}${_unityFileExt_${_language}}")
		list (APPEND _unityFiles "${_baseDir}/${_unityFileName}")
	endif()
	set (${_unityFilesVar} ${_unityFiles} PARENT_SCOPE)
	if (COTIRE_DEBUG)
		message(STATUS "${_unityFiles}")
	endif()
endfunction()

# Derives the prefix header path corresponding to a given unity source file
# by swapping the base name suffix and the file extension.
function (cotire_unity_to_prefix_file_path _language _target _unityFile _prefixFileVar)
	cotire_setup_file_extension_variables()
	if (NOT DEFINED _unityFileExt_${_language})
		set (${_prefixFileVar} "" PARENT_SCOPE)
		return()
	endif()
	set (_unityFileBaseName "${_target}_${_language}${COTIRE_UNITY_SOURCE_FILENAME_SUFFIX}")
	set (_prefixFileBaseName "${_target}_${_language}${COTIRE_PREFIX_HEADER_FILENAME_SUFFIX}")
	string (REPLACE "${_unityFileBaseName}" "${_prefixFileBaseName}" _prefixFile "${_unityFile}")
	string (REGEX REPLACE "${_unityFileExt_${_language}}$"
		"${_prefixFileExt_${_language}}" _prefixFile "${_prefixFile}")
	set (${_prefixFileVar} "${_prefixFile}" PARENT_SCOPE)
endfunction()

# Derives the prefix source file path for a prefix header by replacing the
# header extension with the corresponding source extension.
function (cotire_prefix_header_to_source_file_path _language _prefixHeaderFile _prefixSourceFileVar)
	cotire_setup_file_extension_variables()
	if (NOT DEFINED _prefixSourceFileExt_${_language})
		set (${_prefixSourceFileVar} "" PARENT_SCOPE)
		return()
	endif()
	string (REGEX REPLACE "${_prefixFileExt_${_language}}$"
		"${_prefixSourceFileExt_${_language}}" _prefixSourceFile "${_prefixHeaderFile}")
	set (${_prefixSourceFileVar} "${_prefixSourceFile}" PARENT_SCOPE)
endfunction()

# Computes the base name and file name of the prefix header for a target.
# An empty _language produces a language-neutral ".h" name (used for the
# joint Xcode prefix header); unsupported languages yield empty results.
function (cotire_make_prefix_file_name _language _target _prefixFileBaseNameVar _prefixFileNameVar)
	cotire_setup_file_extension_variables()
	if (NOT _language)
		set (_prefixFileBaseName "${_target}${COTIRE_PREFIX_HEADER_FILENAME_SUFFIX}")
		set (_prefixFileName "${_prefixFileBaseName}${_prefixFileExt_C}")
	elseif (DEFINED _prefixFileExt_${_language})
		set (_prefixFileBaseName "${_target}_${_language}${COTIRE_PREFIX_HEADER_FILENAME_SUFFIX}")
		set (_prefixFileName "${_prefixFileBaseName}${_prefixFileExt_${_language}}")
	else()
		set (_prefixFileBaseName "")
		set (_prefixFileName "")
	endif()
	set (${_prefixFileBaseNameVar} "${_prefixFileBaseName}" PARENT_SCOPE)
	set (${_prefixFileNameVar} "${_prefixFileName}" PARENT_SCOPE)
endfunction()

# Computes the full path of the prefix header in the intermediate dir, or ""
# when the compiler is not one of the types cotire knows how to handle.
function (cotire_make_prefix_file_path _language _target _prefixFileVar)
	cotire_make_prefix_file_name("${_language}" "${_target}" _prefixFileBaseName _prefixFileName)
	set (${_prefixFileVar} "" PARENT_SCOPE)
	if (_prefixFileName)
		if (NOT _language)
			set (_language "C")
		endif()
		if (MSVC OR CMAKE_${_language}_COMPILER_ID MATCHES "GNU|Clang|Intel")
			cotire_get_intermediate_dir(_baseDir)
			set (${_prefixFileVar} "${_baseDir}/${_prefixFileName}" PARENT_SCOPE)
		endif()
	endif()
endfunction()

# Computes the full path of the binary precompiled header file, using the
# compiler-specific naming convention; "" when PCH is unsupported, and no
# path under Xcode (the IDE drives PCH compilation itself).
function (cotire_make_pch_file_path _language _targetSourceDir _target _pchFileVar)
	cotire_make_prefix_file_name("${_language}" "${_target}" _prefixFileBaseName _prefixFileName)
	set (${_pchFileVar} "" PARENT_SCOPE)
	if (_prefixFileBaseName AND _prefixFileName)
		cotire_check_precompiled_header_support("${_language}" "${_targetSourceDir}" "${_target}" _msg)
		if (NOT _msg)
			if (XCODE)
				# For Xcode, we completely hand off the compilation of the prefix header to the IDE
				return()
			endif()
			cotire_get_intermediate_dir(_baseDir)
			if (CMAKE_${_language}_COMPILER_ID MATCHES "MSVC")
				# MSVC uses the extension .pch added to the prefix header base name
				set (${_pchFileVar} "${_baseDir}/${_prefixFileBaseName}.pch" PARENT_SCOPE)
			elseif (CMAKE_${_language}_COMPILER_ID MATCHES "Clang")
				# Clang looks for a precompiled header corresponding to the prefix header with the extension .pch appended
				set (${_pchFileVar} "${_baseDir}/${_prefixFileName}.pch" PARENT_SCOPE)
			elseif (CMAKE_${_language}_COMPILER_ID MATCHES "GNU")
				# GCC looks for a precompiled header corresponding to the prefix header with the extension .gch appended
				set (${_pchFileVar} "${_baseDir}/${_prefixFileName}.gch" PARENT_SCOPE)
			elseif (CMAKE_${_language}_COMPILER_ID MATCHES "Intel")
				# Intel uses the extension .pchi added to the prefix header base name
				set (${_pchFileVar} "${_baseDir}/${_prefixFileBaseName}.pchi" PARENT_SCOPE)
			endif()
		endif()
	endif()
endfunction()

# Returns in ${_sourcesVar} the subset of ARGN covered by the given unity file:
# for a segment file named ..._<start>_<end>.<ext> the sources with those
# indexes (clamped to the list length), otherwise the full source list.
function (cotire_select_unity_source_files _unityFile _sourcesVar)
	set (_sourceFiles ${ARGN})
	if (_sourceFiles AND "${_unityFile}" MATCHES "${COTIRE_UNITY_SOURCE_FILENAME_SUFFIX}_([0-9]+)_([0-9]+)")
		set (_startIndex ${CMAKE_MATCH_1})
		set (_endIndex ${CMAKE_MATCH_2})
		list (LENGTH _sourceFiles _numberOfSources)
		if (NOT _startIndex LESS _numberOfSources)
			math (EXPR _startIndex "${_numberOfSources} - 1")
		endif()
		if (NOT _endIndex LESS _numberOfSources)
			math (EXPR _endIndex "${_numberOfSources} - 1")
		endif()
		set (_files "")
		foreach (_index RANGE ${_startIndex} ${_endIndex})
			list (GET _sourceFiles ${_index} _file)
			list (APPEND _files "${_file}")
		endforeach()
	else()
		set (_files ${_sourceFiles})
	endif()
	set (${_sourcesVar} ${_files} PARENT_SCOPE)
endfunction()
# Collects the generated source files of the target that the unity source
# generation must depend on (excluding COTIRE_EXCLUDED sources and those with
# COTIRE_DEPENDENCY explicitly set to false); result in ${_dependencySourcesVar}.
function (cotire_get_unity_source_dependencies _language _target _dependencySourcesVar)
	set (_dependencySources "")
	# depend on target's generated source files
	cotire_get_objects_with_property_on(_generatedSources GENERATED SOURCE ${ARGN})
	if (_generatedSources)
		# but omit all generated source files that have the COTIRE_EXCLUDED property set to true
		cotire_get_objects_with_property_on(_excludedGeneratedSources COTIRE_EXCLUDED SOURCE ${_generatedSources})
		if (_excludedGeneratedSources)
			list (REMOVE_ITEM _generatedSources ${_excludedGeneratedSources})
		endif()
		# and omit all generated source files that have the COTIRE_DEPENDENCY property set to false explicitly
		cotire_get_objects_with_property_off(_excludedNonDependencySources COTIRE_DEPENDENCY SOURCE ${_generatedSources})
		if (_excludedNonDependencySources)
			list (REMOVE_ITEM _generatedSources ${_excludedNonDependencySources})
		endif()
		if (_generatedSources)
			list (APPEND _dependencySources ${_generatedSources})
		endif()
	endif()
	if (COTIRE_DEBUG AND _dependencySources)
		message (STATUS "${_language} ${_target} unity source depends on ${_dependencySources}")
	endif()
	set (${_dependencySourcesVar} ${_dependencySources} PARENT_SCOPE)
endfunction()

# Collects the source files the prefix header generation must depend on:
# sources marked with COTIRE_DEPENDENCY and, for GCC/Clang, all generated
# sources (these compilers abort preprocessing on missing files).
function (cotire_get_prefix_header_dependencies _language _target _dependencySourcesVar)
	# depend on target source files marked with custom COTIRE_DEPENDENCY property
	set (_dependencySources "")
	cotire_get_objects_with_property_on(_dependencySources COTIRE_DEPENDENCY SOURCE ${ARGN})
	if (CMAKE_${_language}_COMPILER_ID MATCHES "GNU|Clang")
		# GCC and clang raise a fatal error if a file is not found during preprocessing
		# thus we depend on target's generated source files for prefix header generation
		cotire_get_objects_with_property_on(_generatedSources GENERATED SOURCE ${ARGN})
		if (_generatedSources)
			list (APPEND _dependencySources ${_generatedSources})
		endif()
	endif()
	if (COTIRE_DEBUG AND _dependencySources)
		message (STATUS "${_language} ${_target} prefix header DEPENDS ${_dependencySources}")
	endif()
	set (${_dependencySourcesVar} ${_dependencySources} PARENT_SCOPE)
endfunction()

# Serializes all COTIRE_* variables plus selected CMake state into a per-target
# script that the cotire build-time invocations (-P) read back. Returns the raw
# script path in ${_targetScriptVar} and, if the contents contain generator
# expressions, a per-configuration expanded copy (via file(GENERATE)) in
# ${_targetConfigScriptVar}.
function (cotire_generate_target_script _language _configurations _targetSourceDir _targetBinaryDir _target _targetScriptVar _targetConfigScriptVar)
	set (COTIRE_TARGET_SOURCES ${ARGN})
	cotire_get_source_file_property_values(COTIRE_TARGET_SOURCE_LOCATIONS LOCATION ${COTIRE_TARGET_SOURCES})
	cotire_get_prefix_header_dependencies(${_language} ${_target} COTIRE_TARGET_PREFIX_DEPENDS ${COTIRE_TARGET_SOURCES})
	cotire_get_unity_source_dependencies(${_language} ${_target} COTIRE_TARGET_UNITY_DEPENDS ${COTIRE_TARGET_SOURCES})
	# set up variables to be configured
	set (COTIRE_TARGET_LANGUAGE "${_language}")
	cotire_determine_compiler_version("${COTIRE_TARGET_LANGUAGE}" COTIRE_${_language}_COMPILER)
	get_target_property(COTIRE_TARGET_IGNORE_PATH ${_target} COTIRE_PREFIX_HEADER_IGNORE_PATH)
	cotire_add_sys_root_paths(COTIRE_TARGET_IGNORE_PATH)
	get_target_property(COTIRE_TARGET_INCLUDE_PATH ${_target} COTIRE_PREFIX_HEADER_INCLUDE_PATH)
	cotire_add_sys_root_paths(COTIRE_TARGET_INCLUDE_PATH)
	get_target_property(COTIRE_TARGET_PRE_UNDEFS ${_target} COTIRE_UNITY_SOURCE_PRE_UNDEFS)
	get_target_property(COTIRE_TARGET_POST_UNDEFS ${_target} COTIRE_UNITY_SOURCE_POST_UNDEFS)
	get_target_property(COTIRE_TARGET_MAXIMUM_NUMBER_OF_INCLUDES ${_target} COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES)
	cotire_get_source_files_undefs(COTIRE_UNITY_SOURCE_PRE_UNDEFS COTIRE_TARGET_SOURCES_PRE_UNDEFS ${COTIRE_TARGET_SOURCES})
	cotire_get_source_files_undefs(COTIRE_UNITY_SOURCE_POST_UNDEFS COTIRE_TARGET_SOURCES_POST_UNDEFS ${COTIRE_TARGET_SOURCES})
	string (STRIP "${CMAKE_INCLUDE_SYSTEM_FLAG_${_language}}" COTIRE_INCLUDE_SYSTEM_FLAG)
	set (COTIRE_TARGET_CONFIGURATION_TYPES "${_configurations}")
	foreach (_config ${_configurations})
		string (TOUPPER "${_config}" _upperConfig)
		cotire_get_target_include_directories(
			"${_config}" "${_language}" "${_targetSourceDir}" "${_targetBinaryDir}" "${_target}"
			COTIRE_TARGET_INCLUDE_DIRECTORIES_${_upperConfig} COTIRE_TARGET_SYSTEM_INCLUDE_DIRECTORIES_${_upperConfig})
		cotire_get_target_compile_definitions(
			"${_config}" "${_language}" "${_targetSourceDir}" "${_target}" COTIRE_TARGET_COMPILE_DEFINITIONS_${_upperConfig})
		cotire_get_target_compiler_flags(
			"${_config}" "${_language}" "${_targetSourceDir}" "${_target}" COTIRE_TARGET_COMPILE_FLAGS_${_upperConfig})
		cotire_get_source_files_compile_definitions(
			"${_config}" "${_language}" COTIRE_TARGET_SOURCES_COMPILE_DEFINITIONS_${_upperConfig} ${COTIRE_TARGET_SOURCES})
	endforeach()
	get_cmake_property(_vars VARIABLES)
	string (REGEX MATCHALL "COTIRE_[A-Za-z0-9_]+" _matchVars "${_vars}")
	# remove COTIRE_VERBOSE which is passed as a CMake define on command line
	list (REMOVE_ITEM _matchVars COTIRE_VERBOSE)
	set (_contents "")
	set (_contentsHasGeneratorExpressions FALSE)
	foreach (_var IN LISTS _matchVars ITEMS
		XCODE MSVC CMAKE_GENERATOR CMAKE_BUILD_TYPE CMAKE_CONFIGURATION_TYPES
		CMAKE_${_language}_COMPILER_ID CMAKE_${_language}_COMPILER CMAKE_${_language}_COMPILER_ARG1
		CMAKE_${_language}_SOURCE_FILE_EXTENSIONS)
		if (DEFINED ${_var})
			# escape embedded double quotes so the value survives re-parsing
			string (REPLACE "\"" "\\\"" _value "${${_var}}")
			set (_contents "${_contents}set (${_var} \"${_value}\")\n")
			if (NOT _contentsHasGeneratorExpressions)
				if ("${_value}" MATCHES "\\$<.*>")
					set (_contentsHasGeneratorExpressions TRUE)
				endif()
			endif()
		endif()
	endforeach()
	get_filename_component(_moduleName "${COTIRE_CMAKE_MODULE_FILE}" NAME)
	set (_targetCotireScript "${CMAKE_CURRENT_BINARY_DIR}/${_target}_${_language}_${_moduleName}")
	cotire_write_file("CMAKE" "${_targetCotireScript}" "${_contents}" FALSE)
	if (_contentsHasGeneratorExpressions)
		# use file(GENERATE ...) to expand generator expressions in the target script at CMake generate-time
		if (NOT CMAKE_VERSION VERSION_LESS "2.8.12")
			# the file(GENERATE ...) command requires cmake 2.8.12 or later
			# FIX: restore the full generator expression; it had been mangled to "$<$:None>$<$>:$>",
			# which is not a valid generator expression. The expression evaluates to "None" for a
			# configuration-less build and to the configuration name otherwise (upstream cotire).
			set (_configNameOrNoneGeneratorExpression "$<$<CONFIG:>:None>$<$<NOT:$<CONFIG:>>:$<CONFIGURATION>>")
			set (_targetCotireConfigScript
				"${CMAKE_CURRENT_BINARY_DIR}/${_target}_${_language}_${_configNameOrNoneGeneratorExpression}_${_moduleName}")
			file (GENERATE OUTPUT "${_targetCotireConfigScript}" INPUT "${_targetCotireScript}")
		else()
			message (WARNING "cotire: generator expression used in target ${_target}. This requires CMake 2.8.12 or later.")
			set (_targetCotireConfigScript "${_targetCotireScript}")
		endif()
	else()
		set (_targetCotireConfigScript "${_targetCotireScript}")
	endif()
	set (${_targetScriptVar} "${_targetCotireScript}" PARENT_SCOPE)
	set (${_targetConfigScriptVar} "${_targetCotireConfigScript}" PARENT_SCOPE)
endfunction()

# Sets up compilation of the binary PCH: for MSVC/Intel by attaching PCH
# creation flags to the first source file, for Makefile/Ninja generators by
# adding a custom command that invokes this module with "precompile".
function (cotire_setup_pch_file_compilation _language _target _targetSourceDir _targetScript _prefixFile _pchFile)
	set (_sourceFiles ${ARGN})
	if (CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel")
		# for Visual Studio and Intel, we attach the precompiled header compilation to the first source file
		# the remaining files include the precompiled header, see cotire_setup_pch_file_inclusion
		if (_sourceFiles)
			file (TO_NATIVE_PATH "${_prefixFile}" _prefixFileNative)
			file (TO_NATIVE_PATH "${_pchFile}" _pchFileNative)
			list (GET _sourceFiles 0 _hostFile)
			set (_flags "")
			cotire_determine_compiler_version("${_language}" COTIRE_${_language}_COMPILER)
			cotire_add_pch_compilation_flags(
				"${_language}" "${CMAKE_${_language}_COMPILER_ID}" "${COTIRE_${_language}_COMPILER_VERSION}"
				"${_prefixFile}" "${_pchFile}" "${_hostFile}" _flags)
			set_property (SOURCE ${_hostFile} APPEND_STRING PROPERTY COMPILE_FLAGS " ${_flags} ")
			set_property (SOURCE ${_hostFile} APPEND PROPERTY OBJECT_OUTPUTS "${_pchFile}")
			# make first source file depend on prefix header
			set_property (SOURCE ${_hostFile} APPEND PROPERTY OBJECT_DEPENDS "${_prefixFile}")
			# mark first source file as cotired to prevent it from being used in another cotired target
			set_property (SOURCE ${_hostFile} PROPERTY COTIRE_TARGET "${_target}")
		endif()
	elseif ("${CMAKE_GENERATOR}" MATCHES "Makefiles|Ninja")
		# for makefile based generator, we add a custom command to precompile the prefix header
		if (_targetScript)
			cotire_set_cmd_to_prologue(_cmds)
			list (GET _sourceFiles 0 _hostFile)
			list (APPEND _cmds -P "${COTIRE_CMAKE_MODULE_FILE}" "precompile" "${_targetScript}" "${_prefixFile}" "${_pchFile}" "${_hostFile}")
			file (RELATIVE_PATH _pchFileRelPath "${CMAKE_BINARY_DIR}" "${_pchFile}")
			if (COTIRE_DEBUG)
				message (STATUS "add_custom_command: OUTPUT ${_pchFile} ${_cmds} DEPENDS ${_prefixFile} IMPLICIT_DEPENDS ${_language} ${_prefixFile}")
			endif()
			set_property (SOURCE "${_pchFile}" PROPERTY GENERATED TRUE)
			add_custom_command(
				OUTPUT "${_pchFile}"
				COMMAND ${_cmds}
				DEPENDS "${_prefixFile}"
				IMPLICIT_DEPENDS ${_language} "${_prefixFile}"
				WORKING_DIRECTORY "${_targetSourceDir}"
				COMMENT "Building ${_language} precompiled header ${_pchFileRelPath}" VERBATIM)
		endif()
	endif()
endfunction()

# Makes the given source files use the PCH: for MSVC/Intel all but the first
# source (which creates the PCH) get inclusion flags; for Makefile/Ninja
# generators inclusion flags are added per-source only for partial targets.
function (cotire_setup_pch_file_inclusion _language _target _wholeTarget _prefixFile _pchFile)
	set (_sourceFiles ${ARGN})
	if (CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel")
		# for Visual Studio and Intel, we include the precompiled header in all but the first source file
		# the first source file does the precompiled header compilation, see cotire_setup_pch_file_compilation
		list (LENGTH _sourceFiles _numberOfSourceFiles)
		if (_numberOfSourceFiles GREATER 1)
			# mark sources as cotired to prevent them from being used in another cotired target
			set_source_files_properties(${_sourceFiles} PROPERTIES COTIRE_TARGET "${_target}")
			list (REMOVE_AT _sourceFiles 0)
			set (_flags "")
			cotire_determine_compiler_version("${_language}" COTIRE_${_language}_COMPILER)
			cotire_add_prefix_pch_inclusion_flags(
				"${_language}" "${CMAKE_${_language}_COMPILER_ID}" "${COTIRE_${_language}_COMPILER_VERSION}"
				"${_prefixFile}" "${_pchFile}" _flags)
			set_property (SOURCE ${_sourceFiles} APPEND_STRING PROPERTY COMPILE_FLAGS " ${_flags} ")
			# make source files depend on precompiled header
			set_property (SOURCE ${_sourceFiles} APPEND PROPERTY OBJECT_DEPENDS "${_pchFile}")
		endif()
	elseif ("${CMAKE_GENERATOR}" MATCHES "Makefiles|Ninja")
		if (NOT _wholeTarget)
			# for makefile based generator, we force the inclusion of the prefix header for a subset
			# of the source files, if this is a multi-language target or has excluded files
			set (_flags "")
			cotire_determine_compiler_version("${_language}" COTIRE_${_language}_COMPILER)
			cotire_add_prefix_pch_inclusion_flags(
				"${_language}" "${CMAKE_${_language}_COMPILER_ID}" "${COTIRE_${_language}_COMPILER_VERSION}"
				"${_prefixFile}" "${_pchFile}" _flags)
			set_property (SOURCE ${_sourceFiles} APPEND_STRING PROPERTY COMPILE_FLAGS " ${_flags} ")
			# mark sources as cotired to prevent them from being used in another cotired target
			set_source_files_properties(${_sourceFiles} PROPERTIES COTIRE_TARGET "${_target}")
		endif()
		# make source files depend on precompiled header
		set_property (SOURCE ${_sourceFiles} APPEND PROPERTY OBJECT_DEPENDS "${_pchFile}")
	endif()
endfunction()

# Forces the inclusion of the prefix header (without a PCH) for the given
# source files and makes them depend on it.
function (cotire_setup_prefix_file_inclusion _language _target _prefixFile)
	set (_sourceFiles ${ARGN})
	# force the inclusion of the prefix header for the given source files
	set (_flags "")
	cotire_determine_compiler_version("${_language}" COTIRE_${_language}_COMPILER)
	cotire_add_prefix_pch_inclusion_flags(
		"${_language}" "${CMAKE_${_language}_COMPILER_ID}" "${COTIRE_${_language}_COMPILER_VERSION}"
		"${_prefixFile}" "" _flags)
	set_property (SOURCE ${_sourceFiles} APPEND_STRING PROPERTY COMPILE_FLAGS " ${_flags} ")
	# mark sources as cotired to prevent them from being used in another cotired target
	set_source_files_properties(${_sourceFiles} PROPERTIES COTIRE_TARGET "${_target}")
	# make source files depend on prefix header
	set_property (SOURCE ${_sourceFiles} APPEND PROPERTY OBJECT_DEPENDS "${_prefixFile}")
endfunction()

# Returns the first non-empty value among the given properties of the object,
# or "" if none of them is set to a truthy value.
function (cotire_get_first_set_property_value _propertyValueVar _type _object)
	set (_properties ${ARGN})
	foreach (_property ${_properties})
		get_property(_propertyValue ${_type} "${_object}" PROPERTY ${_property})
		if (_propertyValue)
			set (${_propertyValueVar} ${_propertyValue} PARENT_SCOPE)
			return()
		endif()
	endforeach()
	set (${_propertyValueVar} "" PARENT_SCOPE)
endfunction()

# Adds a custom command that concatenates the given files into ${_joinedFile}
# by invoking this module with "combine", and appends the command to ${_cmdsVar}.
function (cotire_setup_combine_command _language _sourceDir _targetScript _joinedFile _cmdsVar)
	set (_files ${ARGN})
	set (_filesPaths "")
	foreach (_file ${_files})
		if (IS_ABSOLUTE "${_file}")
			set (_filePath "${_file}")
		else()
			get_filename_component(_filePath "${_sourceDir}/${_file}" ABSOLUTE)
		endif()
		# prefer paths relative to the source dir unless they escape it
		file (RELATIVE_PATH _fileRelPath "${_sourceDir}" "${_filePath}")
		if (NOT IS_ABSOLUTE "${_fileRelPath}" AND NOT "${_fileRelPath}" MATCHES "^\\.\\.")
			list (APPEND _filesPaths "${_fileRelPath}")
		else()
			list (APPEND _filesPaths "${_filePath}")
		endif()
	endforeach()
	cotire_set_cmd_to_prologue(_prefixCmd)
	list (APPEND _prefixCmd -P "${COTIRE_CMAKE_MODULE_FILE}" "combine")
	if (_targetScript)
		list (APPEND _prefixCmd "${_targetScript}")
	endif()
	list (APPEND _prefixCmd "${_joinedFile}" ${_filesPaths})
	if (COTIRE_DEBUG)
		message (STATUS "add_custom_command: OUTPUT ${_joinedFile} COMMAND ${_prefixCmd} DEPENDS ${_files}")
	endif()
	set_property (SOURCE "${_joinedFile}" PROPERTY GENERATED TRUE)
	file (RELATIVE_PATH _joinedFileRelPath "${CMAKE_BINARY_DIR}" "${_joinedFile}")
	get_filename_component(_joinedFileBaseName "${_joinedFile}" NAME_WE)
	get_filename_component(_joinedFileExt "${_joinedFile}" EXT)
	if (_language AND _joinedFileBaseName MATCHES "${COTIRE_UNITY_SOURCE_FILENAME_SUFFIX}$")
		set (_comment "Generating ${_language} unity source ${_joinedFileRelPath}")
	elseif (_language AND _joinedFileBaseName MATCHES "${COTIRE_PREFIX_HEADER_FILENAME_SUFFIX}$")
		if (_joinedFileExt MATCHES "^\\.c")
			set (_comment "Generating ${_language} prefix source ${_joinedFileRelPath}")
		else()
			set (_comment "Generating ${_language} prefix header ${_joinedFileRelPath}")
		endif()
	else()
		set (_comment "Generating ${_joinedFileRelPath}")
	endif()
	add_custom_command(
		OUTPUT "${_joinedFile}"
		COMMAND ${_prefixCmd}
		DEPENDS ${_files}
		COMMENT "${_comment}"
		WORKING_DIRECTORY "${_sourceDir}" VERBATIM)
	list (APPEND ${_cmdsVar} COMMAND ${_prefixCmd})
	set (${_cmdsVar} ${${_cmdsVar}} PARENT_SCOPE)
endfunction()

# Hooks the PCH into the target build: for Xcode via a PRE_BUILD action and the
# GCC_PREFIX_HEADER attributes; for Makefile/Ninja single-language whole
# targets by appending prefix inclusion flags to the target's COMPILE_FLAGS.
function (cotire_setup_target_pch_usage _languages _targetSourceDir _target _wholeTarget)
	if (XCODE)
		# for Xcode, we attach a pre-build action to generate the unity sources and prefix headers
		# if necessary, we also generate a single prefix header which includes all language specific prefix headers
		set (_prefixFiles "")
		foreach (_language ${_languages})
			get_property(_prefixFile TARGET ${_target} PROPERTY COTIRE_${_language}_PREFIX_HEADER)
			if (_prefixFile)
				list (APPEND _prefixFiles "${_prefixFile}")
			endif()
		endforeach()
		set (_cmds ${ARGN})
		list (LENGTH _prefixFiles _numberOfPrefixFiles)
		if (_numberOfPrefixFiles GREATER 1)
			cotire_make_prefix_file_path("" ${_target} _prefixHeader)
			cotire_setup_combine_command("" "${_targetSourceDir}" "" "${_prefixHeader}" _cmds ${_prefixFiles})
		else()
			set (_prefixHeader "${_prefixFiles}")
		endif()
		if (COTIRE_DEBUG)
			message (STATUS "add_custom_command: TARGET ${_target} PRE_BUILD ${_cmds}")
		endif()
		add_custom_command(TARGET "${_target}"
			PRE_BUILD ${_cmds}
			WORKING_DIRECTORY "${_targetSourceDir}"
			COMMENT "Updating target ${_target} prefix headers" VERBATIM)
		# make Xcode precompile the generated prefix header with ProcessPCH and ProcessPCH++
		set_target_properties(${_target} PROPERTIES XCODE_ATTRIBUTE_GCC_PRECOMPILE_PREFIX_HEADER "YES")
		set_target_properties(${_target} PROPERTIES XCODE_ATTRIBUTE_GCC_PREFIX_HEADER "${_prefixHeader}")
	elseif ("${CMAKE_GENERATOR}" MATCHES "Makefiles|Ninja")
		# for makefile based generator, we force inclusion of the prefix header for all target source files
		# if this is a single-language target without any excluded files
		if (_wholeTarget)
			set (_language "${_languages}")
			# for Visual Studio and Intel, precompiled header inclusion is always done on the source file level
			# see cotire_setup_pch_file_inclusion
			if (NOT CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel")
				get_property(_prefixFile TARGET ${_target} PROPERTY COTIRE_${_language}_PREFIX_HEADER)
				if (_prefixFile)
					set (_flags "")
					cotire_determine_compiler_version("${_language}" COTIRE_${_language}_COMPILER)
					get_property(_pchFile TARGET ${_target} PROPERTY COTIRE_${_language}_PRECOMPILED_HEADER)
					cotire_add_prefix_pch_inclusion_flags(
						"${_language}" "${CMAKE_${_language}_COMPILER_ID}" "${COTIRE_${_language}_COMPILER_VERSION}"
						"${_prefixFile}" "${_pchFile}" _flags)
					set_property(TARGET ${_target} APPEND_STRING PROPERTY COMPILE_FLAGS " ${_flags} ")
				endif()
			endif()
		endif()
	endif()
endfunction()

# Adds custom commands generating each unity source (and, for multiple
# segments, a joint unity file); sets OBJECT_DEPENDS on generated sources and
# /bigobj for MSVC/Intel on Windows. Appends generation commands to ${_cmdsVar}.
function (cotire_setup_unity_generation_commands _language _targetSourceDir _target _targetScript _targetConfigScript _unityFiles _cmdsVar)
	set (_dependencySources "")
	cotire_get_unity_source_dependencies(${_language} ${_target} _dependencySources ${ARGN})
	foreach (_unityFile ${_unityFiles})
		file (RELATIVE_PATH _unityFileRelPath "${CMAKE_BINARY_DIR}" "${_unityFile}")
		set_property (SOURCE "${_unityFile}" PROPERTY GENERATED TRUE)
		# set up compiled unity source dependencies via OBJECT_DEPENDS
		# this ensures that missing source files are generated before the unity file is compiled
		if (COTIRE_DEBUG AND _dependencySources)
			message (STATUS "${_unityFile} OBJECT_DEPENDS ${_dependencySources}")
		endif()
		if (_dependencySources)
			# the OBJECT_DEPENDS property requires a list of full paths
			set (_objectDependsPaths "")
			foreach (_sourceFile ${_dependencySources})
				get_source_file_property(_sourceLocation "${_sourceFile}" LOCATION)
				list (APPEND _objectDependsPaths "${_sourceLocation}")
			endforeach()
			set_property (SOURCE "${_unityFile}" PROPERTY OBJECT_DEPENDS ${_objectDependsPaths})
		endif()
		if (WIN32 AND CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel")
			# unity file compilation results in potentially huge object file, thus use /bigobj by default under MSVC and Windows Intel
			set_property (SOURCE "${_unityFile}" APPEND_STRING PROPERTY COMPILE_FLAGS "/bigobj")
		endif()
		cotire_set_cmd_to_prologue(_unityCmd)
		list (APPEND _unityCmd -P "${COTIRE_CMAKE_MODULE_FILE}" "unity" "${_targetConfigScript}" "${_unityFile}")
		if (CMAKE_VERSION VERSION_LESS "3.1.0")
			set (_unityCmdDepends "${_targetScript}")
		else()
			# CMake 3.1.0 supports generator expressions in arguments to DEPENDS
			set (_unityCmdDepends "${_targetConfigScript}")
		endif()
		if (COTIRE_DEBUG)
			message (STATUS "add_custom_command: OUTPUT ${_unityFile} COMMAND ${_unityCmd} DEPENDS ${_unityCmdDepends}")
		endif()
		add_custom_command(
			OUTPUT "${_unityFile}"
			COMMAND ${_unityCmd}
			DEPENDS ${_unityCmdDepends}
			COMMENT "Generating ${_language} unity source ${_unityFileRelPath}"
			WORKING_DIRECTORY "${_targetSourceDir}" VERBATIM)
		list (APPEND ${_cmdsVar} COMMAND ${_unityCmd})
	endforeach()
	list (LENGTH _unityFiles _numberOfUnityFiles)
	if (_numberOfUnityFiles GREATER 1)
		# create a joint unity file from all unity file segments
		cotire_make_single_unity_source_file_path(${_language} ${_target} _unityFile)
		cotire_setup_combine_command(${_language} "${_targetSourceDir}" "${_targetConfigScript}" "${_unityFile}" ${_cmdsVar} ${_unityFiles})
	endif()
	set (${_cmdsVar} ${${_cmdsVar}} PARENT_SCOPE)
endfunction()

# Adds a custom command generating the prefix header (or prefix source) from a
# unity file by invoking this module with "prefix"; appends it to ${_cmdsVar}.
function (cotire_setup_prefix_generation_command _language _target _targetSourceDir _targetScript _prefixFile _unityFile _cmdsVar)
	set (_sourceFiles ${ARGN})
	set (_dependencySources "")
	cotire_get_prefix_header_dependencies(${_language} ${_target} _dependencySources ${_sourceFiles})
	cotire_set_cmd_to_prologue(_prefixCmd)
	list (APPEND _prefixCmd -P "${COTIRE_CMAKE_MODULE_FILE}" "prefix" "${_targetScript}" "${_prefixFile}" "${_unityFile}")
	set_property (SOURCE "${_prefixFile}" PROPERTY GENERATED TRUE)
	if (COTIRE_DEBUG)
		message (STATUS "add_custom_command: OUTPUT ${_prefixFile} COMMAND ${_prefixCmd} DEPENDS ${_unityFile} ${_dependencySources}")
	endif()
	file (RELATIVE_PATH _prefixFileRelPath "${CMAKE_BINARY_DIR}" "${_prefixFile}")
	get_filename_component(_prefixFileExt "${_prefixFile}" EXT)
	if (_prefixFileExt MATCHES "^\\.c")
		set (_comment "Generating ${_language} prefix source ${_prefixFileRelPath}")
	else()
		set (_comment "Generating ${_language} prefix header ${_prefixFileRelPath}")
	endif()
	add_custom_command(
		OUTPUT "${_prefixFile}" "${_prefixFile}.log"
		COMMAND ${_prefixCmd}
		DEPENDS "${_unityFile}" ${_dependencySources}
		COMMENT "${_comment}"
		WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" VERBATIM)
	list (APPEND ${_cmdsVar} COMMAND ${_prefixCmd})
	set (${_cmdsVar} ${${_cmdsVar}} PARENT_SCOPE)
endfunction()

# Generates the prefix header from the (joint) unity source; for GCC/Clang the
# header is produced indirectly via a prefix source so the system_header
# pragma is honored.
function (cotire_setup_prefix_generation_from_unity_command _language _target _targetSourceDir _targetScript _prefixFile _unityFiles _cmdsVar)
	set (_sourceFiles ${ARGN})
	if (CMAKE_${_language}_COMPILER_ID MATCHES "GNU|Clang")
		# GNU and Clang require indirect compilation of the prefix header to make them honor the system_header pragma
		cotire_prefix_header_to_source_file_path(${_language} "${_prefixFile}" _prefixSourceFile)
	else()
		set (_prefixSourceFile "${_prefixFile}")
	endif()
	list (LENGTH _unityFiles _numberOfUnityFiles)
	if (_numberOfUnityFiles GREATER 1)
		cotire_make_single_unity_source_file_path(${_language} ${_target} _unityFile)
		cotire_setup_prefix_generation_command(
			${_language} ${_target} "${_targetSourceDir}" "${_targetScript}"
			"${_prefixSourceFile}" "${_unityFile}" ${_cmdsVar} ${_sourceFiles})
	else()
		cotire_setup_prefix_generation_command(
			${_language} ${_target} "${_targetSourceDir}" "${_targetScript}"
			"${_prefixSourceFile}" "${_unityFiles}" ${_cmdsVar} ${_sourceFiles})
	endif()
	if (CMAKE_${_language}_COMPILER_ID MATCHES "GNU|Clang")
		cotire_setup_combine_command(${_language} "${_targetSourceDir}" "${_targetScript}" "${_prefixFile}" ${_cmdsVar} ${_prefixSourceFile})
	endif()
	set (${_cmdsVar} ${${_cmdsVar}} PARENT_SCOPE)
endfunction()

# Generates the prefix header by combining user-provided prefix header files;
# for GCC/Clang again via the indirect prefix source. Returns the generation
# commands in ${_cmdsVar}.
function (cotire_setup_prefix_generation_from_provided_command _language _target _targetSourceDir _targetScript _prefixFile _cmdsVar)
	set (_prefixHeaderFiles ${ARGN})
	if (CMAKE_${_language}_COMPILER_ID MATCHES "GNU|Clang")
		# GNU and Clang require indirect compilation of the prefix header to make them honor the system_header pragma
		cotire_prefix_header_to_source_file_path(${_language} "${_prefixFile}" _prefixSourceFile)
	else()
		set (_prefixSourceFile "${_prefixFile}")
	endif()
	cotire_setup_combine_command(${_language} "${_targetSourceDir}" "${_targetScript}" "${_prefixSourceFile}" _cmds ${_prefixHeaderFiles})
	if (CMAKE_${_language}_COMPILER_ID MATCHES "GNU|Clang")
		cotire_setup_combine_command(${_language} "${_targetSourceDir}" "${_targetScript}" "${_prefixFile}" _cmds ${_prefixSourceFile})
	endif()
	# FIX: propagate the commands accumulated in the local _cmds list; the previous
	# "set (${_cmdsVar} ${${_cmdsVar}} PARENT_SCOPE)" re-exported the (possibly empty)
	# inherited value and silently dropped the combine commands set up above.
	set (${_cmdsVar} ${_cmds} PARENT_SCOPE)
endfunction()

# Initializes all COTIRE_* target properties with their documented defaults,
# without overriding values the user has already set explicitly.
function (cotire_init_cotire_target_properties _target)
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_ENABLE_PRECOMPILED_HEADER SET)
	if (NOT _isSet)
		set_property(TARGET ${_target} PROPERTY COTIRE_ENABLE_PRECOMPILED_HEADER TRUE)
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_ADD_UNITY_BUILD SET)
	if (NOT _isSet)
		set_property(TARGET ${_target} PROPERTY COTIRE_ADD_UNITY_BUILD TRUE)
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_ADD_CLEAN SET)
	if (NOT _isSet)
		set_property(TARGET ${_target} PROPERTY COTIRE_ADD_CLEAN FALSE)
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_PREFIX_HEADER_IGNORE_PATH SET)
	if (NOT _isSet)
		# by default, ignore headers from the project's own source tree (and a
		# binary dir that is not inside the source tree)
		set_property(TARGET ${_target} PROPERTY COTIRE_PREFIX_HEADER_IGNORE_PATH "${CMAKE_SOURCE_DIR}")
		cotire_check_is_path_relative_to("${CMAKE_BINARY_DIR}" _isRelative "${CMAKE_SOURCE_DIR}")
		if (NOT _isRelative)
			set_property(TARGET ${_target} APPEND PROPERTY COTIRE_PREFIX_HEADER_IGNORE_PATH "${CMAKE_BINARY_DIR}")
		endif()
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_PREFIX_HEADER_INCLUDE_PATH SET)
	if (NOT _isSet)
		set_property(TARGET ${_target} PROPERTY COTIRE_PREFIX_HEADER_INCLUDE_PATH "")
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_UNITY_SOURCE_PRE_UNDEFS SET)
	if (NOT _isSet)
		set_property(TARGET ${_target} PROPERTY COTIRE_UNITY_SOURCE_PRE_UNDEFS "")
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_UNITY_SOURCE_POST_UNDEFS SET)
	if (NOT _isSet)
		set_property(TARGET ${_target} PROPERTY COTIRE_UNITY_SOURCE_POST_UNDEFS "")
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_UNITY_LINK_LIBRARIES_INIT SET)
	if (NOT _isSet)
		set_property(TARGET ${_target} PROPERTY COTIRE_UNITY_LINK_LIBRARIES_INIT "")
	endif()
	get_property(_isSet TARGET ${_target} PROPERTY COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES SET)
	if (NOT _isSet)
		if (COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES)
			set_property(TARGET ${_target} PROPERTY COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES "${COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES}")
		else()
			set_property(TARGET ${_target} PROPERTY COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES "")
		endif()
	endif()
endfunction()

# Composes the human-readable status message reported for a cotired target,
# reflecting PCH/unity-build enablement, excluded files (ARGN) and _disableMsg.
function (cotire_make_target_message _target _languages _disableMsg _targetMsgVar)
	get_target_property(_targetUsePCH ${_target} COTIRE_ENABLE_PRECOMPILED_HEADER)
	get_target_property(_targetAddSCU ${_target} COTIRE_ADD_UNITY_BUILD)
	string (REPLACE ";" " " _languagesStr "${_languages}")
	math (EXPR _numberOfExcludedFiles "${ARGC} - 4")
	if (_numberOfExcludedFiles EQUAL 0)
		set (_excludedStr "")
	elseif (COTIRE_VERBOSE OR _numberOfExcludedFiles LESS 4)
		string (REPLACE ";" ", " _excludedStr "excluding ${ARGN}")
	else()
		set (_excludedStr "excluding ${_numberOfExcludedFiles} files")
	endif()
	set (_targetMsg "")
	if (NOT _languages)
		set (_targetMsg "Target ${_target} cannot be cotired.")
		if (_disableMsg)
			set (_targetMsg "${_targetMsg} ${_disableMsg}")
		endif()
	elseif (NOT _targetUsePCH AND NOT _targetAddSCU)
		set (_targetMsg "${_languagesStr} target ${_target} cotired without unity build and precompiled header.")
		if (_disableMsg)
			set (_targetMsg "${_targetMsg} ${_disableMsg}")
		endif()
	elseif (NOT _targetUsePCH)
		if (_excludedStr)
			set (_targetMsg "${_languagesStr} target ${_target} cotired without precompiled header ${_excludedStr}.")
		else()
			set (_targetMsg "${_languagesStr} target ${_target} cotired without precompiled header.")
		endif()
		if (_disableMsg)
			set (_targetMsg "${_targetMsg} ${_disableMsg}")
		endif()
	elseif (NOT _targetAddSCU)
		if (_excludedStr)
			set (_targetMsg "${_languagesStr} target ${_target} cotired without unity build ${_excludedStr}.")
		else()
			set (_targetMsg "${_languagesStr} target ${_target} cotired without unity build.")
		endif()
	else()
		if (_excludedStr)
			set (_targetMsg "${_languagesStr} target ${_target} cotired ${_excludedStr}.")
		else()
			set (_targetMsg "${_languagesStr} target ${_target} cotired.")
		endif()
	endif()
	set (${_targetMsgVar} "${_targetMsg}" PARENT_SCOPE)
endfunction()

# Determines which of the candidate languages (ARGN) actually apply to the
# target, downgrades the target's PCH/unity-build settings when necessary
# (already cotired, too few sources, Xcode restrictions, ...), reports a
# status message, and returns the applicable languages plus whether cotire can
# operate on the whole target at once.
function (cotire_choose_target_languages _targetSourceDir _target _targetLanguagesVar _wholeTargetVar)
	set (_languages ${ARGN})
	set (_allSourceFiles "")
	set (_allExcludedSourceFiles "")
	set (_allCotiredSourceFiles "")
	set (_targetLanguages "")
	set (_pchEligibleTargetLanguages "")
	get_target_property(_targetType ${_target} TYPE)
	get_target_property(_targetSourceFiles ${_target} SOURCES)
	get_target_property(_targetUsePCH ${_target} COTIRE_ENABLE_PRECOMPILED_HEADER)
	get_target_property(_targetAddSCU ${_target} COTIRE_ADD_UNITY_BUILD)
	set (_disableMsg "")
	foreach (_language ${_languages})
		get_target_property(_prefixHeader ${_target} COTIRE_${_language}_PREFIX_HEADER)
		get_target_property(_unityBuildFile ${_target} COTIRE_${_language}_UNITY_SOURCE)
		if (_prefixHeader OR _unityBuildFile)
			message (STATUS "cotire: target ${_target} has already been cotired.")
			set (${_targetLanguagesVar} "" PARENT_SCOPE)
			return()
		endif()
		if (_targetUsePCH AND "${_language}" MATCHES "^C|CXX$")
			cotire_check_precompiled_header_support("${_language}" "${_targetSourceDir}" "${_target}" _disableMsg)
			if (_disableMsg)
				set (_targetUsePCH FALSE)
			endif()
		endif()
		set (_sourceFiles "")
		set (_excludedSources "")
		set (_cotiredSources "")
		cotire_filter_language_source_files(${_language} _sourceFiles _excludedSources _cotiredSources ${_targetSourceFiles})
		if (_sourceFiles OR _excludedSources OR _cotiredSources)
			list (APPEND _targetLanguages ${_language})
		endif()
		if (_sourceFiles)
			list (APPEND _allSourceFiles ${_sourceFiles})
		endif()
		list (LENGTH _sourceFiles _numberOfSources)
		if (NOT _numberOfSources LESS ${COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES})
			list (APPEND _pchEligibleTargetLanguages ${_language})
		endif()
		if (_excludedSources)
			list (APPEND _allExcludedSourceFiles ${_excludedSources})
		endif()
		if (_cotiredSources)
			list (APPEND _allCotiredSourceFiles ${_cotiredSources})
		endif()
	endforeach()
	set (_targetMsgLevel STATUS)
	if (NOT _targetLanguages)
		string (REPLACE ";" " or " _languagesStr "${_languages}")
		set (_disableMsg "No ${_languagesStr} source files.")
		set (_targetUsePCH FALSE)
		set (_targetAddSCU FALSE)
	endif()
	if (_targetUsePCH)
		if (_allCotiredSourceFiles)
			cotire_get_source_file_property_values(_cotireTargets COTIRE_TARGET ${_allCotiredSourceFiles})
			list (REMOVE_DUPLICATES _cotireTargets)
			string (REPLACE ";" ", " _cotireTargetsStr "${_cotireTargets}")
			set (_disableMsg "Target sources already include a precompiled header for target(s) ${_cotireTargets}.")
			set (_disableMsg "${_disableMsg} Set target property COTIRE_ENABLE_PRECOMPILED_HEADER to FALSE for targets ${_target},")
			set (_disableMsg "${_disableMsg} ${_cotireTargetsStr} to get a workable build system.")
			set (_targetMsgLevel SEND_ERROR)
			set (_targetUsePCH FALSE)
		elseif (NOT _pchEligibleTargetLanguages)
			set (_disableMsg "Too few applicable sources.")
			set (_targetUsePCH FALSE)
		elseif (XCODE AND _allExcludedSourceFiles)
			# for Xcode, we cannot apply the precompiled header to individual sources, only to the whole target
			set (_disableMsg "Exclusion of source files not supported for generator Xcode.")
			set (_targetUsePCH FALSE)
		elseif (XCODE AND "${_targetType}" STREQUAL "OBJECT_LIBRARY")
			# for Xcode, we cannot apply the required PRE_BUILD action to generate the prefix header to an OBJECT_LIBRARY target
			set (_disableMsg "Required PRE_BUILD action not supported for OBJECT_LIBRARY targets for generator Xcode.")
			set (_targetUsePCH FALSE)
		endif()
	endif()
	set_property(TARGET ${_target} PROPERTY COTIRE_ENABLE_PRECOMPILED_HEADER ${_targetUsePCH})
	set_property(TARGET ${_target} PROPERTY COTIRE_ADD_UNITY_BUILD ${_targetAddSCU})
	cotire_make_target_message(${_target} "${_targetLanguages}" "${_disableMsg}" _targetMsg ${_allExcludedSourceFiles})
	if (_targetMsg)
		if (NOT DEFINED COTIREMSG_${_target})
			set (COTIREMSG_${_target} "")
		endif()
		if (COTIRE_VERBOSE OR NOT "${_targetMsgLevel}" STREQUAL "STATUS" OR
			NOT "${COTIREMSG_${_target}}" STREQUAL "${_targetMsg}")
			# cache message to avoid redundant messages on re-configure
			set (COTIREMSG_${_target} "${_targetMsg}" CACHE INTERNAL "${_target} cotire message.")
			message (${_targetMsgLevel} "${_targetMsg}")
		endif()
	endif()
	list (LENGTH _targetLanguages _numberOfLanguages)
	if (_numberOfLanguages GREATER 1 OR _allExcludedSourceFiles)
		set (${_wholeTargetVar} FALSE PARENT_SCOPE)
	else()
		set (${_wholeTargetVar} TRUE PARENT_SCOPE)
	endif()
	set (${_targetLanguagesVar} ${_targetLanguages} PARENT_SCOPE)
endfunction()

# Computes the maximum number of includes per unity segment from the target's
# COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES property; "-jN"-style values
# distribute the sources over N (or all available) cores.
# (Definition continues past this chunk.)
function (cotire_compute_unity_max_number_of_includes _target _maxIncludesVar)
	set (_sourceFiles ${ARGN})
	get_target_property(_maxIncludes ${_target} COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES)
	if (_maxIncludes MATCHES "(-j|--parallel|--jobs) ?([0-9]*)")
		set (_numberOfThreads "${CMAKE_MATCH_2}")
		if (NOT _numberOfThreads)
			# use all available cores
			ProcessorCount(_numberOfThreads)
		endif()
		list (LENGTH _sourceFiles _numberOfSources)
		math (EXPR _maxIncludes "(${_numberOfSources} + ${_numberOfThreads} - 1) / ${_numberOfThreads}")
		# a unity source segment must not contain less than COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES files
		if (_maxIncludes LESS ${COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES})
			set (_maxIncludes ${COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES})
		endif()
	elseif (NOT _maxIncludes MATCHES "[0-9]+")
		set (_maxIncludes 0)
	endif()
	if (COTIRE_DEBUG)
message (STATUS "${_target} unity source max includes = ${_maxIncludes}") endif() set (${_maxIncludesVar} ${_maxIncludes} PARENT_SCOPE) endfunction() function (cotire_process_target_language _language _configurations _targetSourceDir _targetBinaryDir _target _wholeTarget _cmdsVar) set (${_cmdsVar} "" PARENT_SCOPE) get_target_property(_targetSourceFiles ${_target} SOURCES) set (_sourceFiles "") set (_excludedSources "") set (_cotiredSources "") cotire_filter_language_source_files(${_language} _sourceFiles _excludedSources _cotiredSources ${_targetSourceFiles}) if (NOT _sourceFiles AND NOT _cotiredSources) return() endif() set (_cmds "") # check for user provided unity source file list get_property(_unitySourceFiles TARGET ${_target} PROPERTY COTIRE_${_language}_UNITY_SOURCE_INIT) if (NOT _unitySourceFiles) set (_unitySourceFiles ${_sourceFiles} ${_cotiredSources}) endif() cotire_generate_target_script( ${_language} "${_configurations}" "${_targetSourceDir}" "${_targetBinaryDir}" ${_target} _targetScript _targetConfigScript ${_unitySourceFiles}) cotire_compute_unity_max_number_of_includes(${_target} _maxIncludes ${_unitySourceFiles}) cotire_make_unity_source_file_paths(${_language} ${_target} ${_maxIncludes} _unityFiles ${_unitySourceFiles}) if (NOT _unityFiles) return() endif() cotire_setup_unity_generation_commands( ${_language} "${_targetSourceDir}" ${_target} "${_targetScript}" "${_targetConfigScript}" "${_unityFiles}" _cmds ${_unitySourceFiles}) cotire_make_prefix_file_path(${_language} ${_target} _prefixFile) if (_prefixFile) # check for user provided prefix header files get_property(_prefixHeaderFiles TARGET ${_target} PROPERTY COTIRE_${_language}_PREFIX_HEADER_INIT) if (_prefixHeaderFiles) cotire_setup_prefix_generation_from_provided_command( ${_language} ${_target} "${_targetSourceDir}" "${_targetConfigScript}" "${_prefixFile}" _cmds ${_prefixHeaderFiles}) else() cotire_setup_prefix_generation_from_unity_command( ${_language} ${_target} "${_targetSourceDir}" 
"${_targetConfigScript}" "${_prefixFile}" "${_unityFiles}" _cmds ${_unitySourceFiles}) endif() # check if selected language has enough sources at all list (LENGTH _sourceFiles _numberOfSources) if (_numberOfSources LESS ${COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES}) set (_targetUsePCH FALSE) else() get_target_property(_targetUsePCH ${_target} COTIRE_ENABLE_PRECOMPILED_HEADER) endif() if (_targetUsePCH) cotire_make_pch_file_path(${_language} "${_targetSourceDir}" ${_target} _pchFile) if (_pchFile) cotire_setup_pch_file_compilation( ${_language} ${_target} "${_targetSourceDir}" "${_targetConfigScript}" "${_prefixFile}" "${_pchFile}" ${_sourceFiles}) cotire_setup_pch_file_inclusion( ${_language} ${_target} ${_wholeTarget} "${_prefixFile}" "${_pchFile}" ${_sourceFiles}) endif() elseif (_prefixHeaderFiles) # user provided prefix header must be included unconditionally cotire_setup_prefix_file_inclusion(${_language} ${_target} "${_prefixFile}" ${_sourceFiles}) endif() endif() # mark target as cotired for language set_property(TARGET ${_target} PROPERTY COTIRE_${_language}_UNITY_SOURCE "${_unityFiles}") if (_prefixFile) set_property(TARGET ${_target} PROPERTY COTIRE_${_language}_PREFIX_HEADER "${_prefixFile}") if (_targetUsePCH AND _pchFile) set_property(TARGET ${_target} PROPERTY COTIRE_${_language}_PRECOMPILED_HEADER "${_pchFile}") endif() endif() set (${_cmdsVar} ${_cmds} PARENT_SCOPE) endfunction() function (cotire_setup_clean_target _target) set (_cleanTargetName "${_target}${COTIRE_CLEAN_TARGET_SUFFIX}") if (NOT TARGET "${_cleanTargetName}") cotire_set_cmd_to_prologue(_cmds) get_filename_component(_outputDir "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}" ABSOLUTE) list (APPEND _cmds -P "${COTIRE_CMAKE_MODULE_FILE}" "cleanup" "${_outputDir}" "${COTIRE_INTDIR}" "${_target}") add_custom_target(${_cleanTargetName} COMMAND ${_cmds} WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" COMMENT "Cleaning up target ${_target} cotire generated files" VERBATIM) 
cotire_init_target("${_cleanTargetName}") endif() endfunction() function (cotire_setup_pch_target _languages _configurations _target) if ("${CMAKE_GENERATOR}" MATCHES "Makefiles|Ninja") # for makefile based generators, we add a custom target to trigger the generation of the cotire related files set (_dependsFiles "") foreach (_language ${_languages}) set (_props COTIRE_${_language}_PREFIX_HEADER COTIRE_${_language}_UNITY_SOURCE) if (NOT CMAKE_${_language}_COMPILER_ID MATCHES "MSVC|Intel") # Visual Studio and Intel only create precompiled header as a side effect list (INSERT _props 0 COTIRE_${_language}_PRECOMPILED_HEADER) endif() cotire_get_first_set_property_value(_dependsFile TARGET ${_target} ${_props}) if (_dependsFile) list (APPEND _dependsFiles "${_dependsFile}") endif() endforeach() if (_dependsFiles) set (_pchTargetName "${_target}${COTIRE_PCH_TARGET_SUFFIX}") add_custom_target("${_pchTargetName}" DEPENDS ${_dependsFiles}) cotire_init_target("${_pchTargetName}") cotire_add_to_pch_all_target(${_pchTargetName}) endif() else() # for other generators, we add the "clean all" target to clean up the precompiled header cotire_setup_clean_all_target() endif() endfunction() function (cotire_setup_unity_build_target _languages _configurations _targetSourceDir _target) get_target_property(_unityTargetName ${_target} COTIRE_UNITY_TARGET_NAME) if (NOT _unityTargetName) set (_unityTargetName "${_target}${COTIRE_UNITY_BUILD_TARGET_SUFFIX}") endif() # determine unity target sub type get_target_property(_targetType ${_target} TYPE) if ("${_targetType}" STREQUAL "EXECUTABLE") set (_unityTargetSubType "") elseif (_targetType MATCHES "(STATIC|SHARED|MODULE|OBJECT)_LIBRARY") set (_unityTargetSubType "${CMAKE_MATCH_1}") else() message (WARNING "cotire: target ${_target} has unknown target type ${_targetType}.") return() endif() # determine unity target sources get_target_property(_targetSourceFiles ${_target} SOURCES) set (_unityTargetSources ${_targetSourceFiles}) foreach 
(_language ${_languages}) get_property(_unityFiles TARGET ${_target} PROPERTY COTIRE_${_language}_UNITY_SOURCE) if (_unityFiles) # remove source files that are included in the unity source set (_sourceFiles "") set (_excludedSources "") set (_cotiredSources "") cotire_filter_language_source_files(${_language} _sourceFiles _excludedSources _cotiredSources ${_targetSourceFiles}) if (_sourceFiles OR _cotiredSources) list (REMOVE_ITEM _unityTargetSources ${_sourceFiles} ${_cotiredSources}) endif() # if cotire is applied to a target which has not been added in the current source dir, # non-existing files cannot be referenced from the unity build target (this is a CMake restriction) if (NOT "${_targetSourceDir}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}") set (_nonExistingFiles "") foreach (_file ${_unityTargetSources}) if (NOT EXISTS "${_file}") list (APPEND _nonExistingFiles "${_file}") endif() endforeach() if (_nonExistingFiles) if (COTIRE_VERBOSE) message (STATUS "removing non-existing ${_nonExistingFiles} from ${_unityTargetName}") endif() list (REMOVE_ITEM _unityTargetSources ${_nonExistingFiles}) endif() endif() # add unity source files instead list (APPEND _unityTargetSources ${_unityFiles}) endif() endforeach() if (COTIRE_DEBUG) message (STATUS "add ${_targetType} ${_unityTargetName} ${_unityTargetSubType} EXCLUDE_FROM_ALL ${_unityTargetSources}") endif() # generate unity target if ("${_targetType}" STREQUAL "EXECUTABLE") add_executable(${_unityTargetName} ${_unityTargetSubType} EXCLUDE_FROM_ALL ${_unityTargetSources}) else() add_library(${_unityTargetName} ${_unityTargetSubType} EXCLUDE_FROM_ALL ${_unityTargetSources}) endif() set (_outputDirProperties ARCHIVE_OUTPUT_DIRECTORY ARCHIVE_OUTPUT_DIRECTORY_ LIBRARY_OUTPUT_DIRECTORY LIBRARY_OUTPUT_DIRECTORY_ RUNTIME_OUTPUT_DIRECTORY RUNTIME_OUTPUT_DIRECTORY_) # copy output location properties if (COTIRE_UNITY_OUTPUT_DIRECTORY) set (_setDefaultOutputDir TRUE) if (IS_ABSOLUTE "${COTIRE_UNITY_OUTPUT_DIRECTORY}") set 
(_outputDir "${COTIRE_UNITY_OUTPUT_DIRECTORY}") else() cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} ${_outputDirProperties}) cotire_resolve_config_properites("${_configurations}" _properties ${_outputDirProperties}) foreach (_property ${_properties}) get_property(_outputDir TARGET ${_target} PROPERTY ${_property}) if (_outputDir) get_filename_component(_outputDir "${_outputDir}/${COTIRE_UNITY_OUTPUT_DIRECTORY}" ABSOLUTE) set_property(TARGET ${_unityTargetName} PROPERTY ${_property} "${_outputDir}") set (_setDefaultOutputDir FALSE) endif() endforeach() if (_setDefaultOutputDir) get_filename_component(_outputDir "${CMAKE_CURRENT_BINARY_DIR}/${COTIRE_UNITY_OUTPUT_DIRECTORY}" ABSOLUTE) endif() endif() if (_setDefaultOutputDir) set_target_properties(${_unityTargetName} PROPERTIES ARCHIVE_OUTPUT_DIRECTORY "${_outputDir}" LIBRARY_OUTPUT_DIRECTORY "${_outputDir}" RUNTIME_OUTPUT_DIRECTORY "${_outputDir}") endif() else() cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} ${_outputDirProperties}) endif() # copy output name cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} ARCHIVE_OUTPUT_NAME ARCHIVE_OUTPUT_NAME_ LIBRARY_OUTPUT_NAME LIBRARY_OUTPUT_NAME_ OUTPUT_NAME OUTPUT_NAME_ RUNTIME_OUTPUT_NAME RUNTIME_OUTPUT_NAME_ PREFIX _POSTFIX SUFFIX IMPORT_PREFIX IMPORT_SUFFIX) # copy compile stuff cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} COMPILE_DEFINITIONS COMPILE_DEFINITIONS_ COMPILE_FLAGS COMPILE_OPTIONS Fortran_FORMAT Fortran_MODULE_DIRECTORY INCLUDE_DIRECTORIES INTERPROCEDURAL_OPTIMIZATION INTERPROCEDURAL_OPTIMIZATION_ POSITION_INDEPENDENT_CODE C_VISIBILITY_PRESET CXX_VISIBILITY_PRESET VISIBILITY_INLINES_HIDDEN) # copy interface stuff cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} COMPATIBLE_INTERFACE_BOOL COMPATIBLE_INTERFACE_NUMBER_MAX COMPATIBLE_INTERFACE_NUMBER_MIN 
COMPATIBLE_INTERFACE_STRING INTERFACE_COMPILE_DEFINITIONS INTERFACE_COMPILE_OPTIONS INTERFACE_INCLUDE_DIRECTORIES INTERFACE_POSITION_INDEPENDENT_CODE INTERFACE_SYSTEM_INCLUDE_DIRECTORIES INTERFACE_AUTOUIC_OPTIONS) # copy link stuff cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} BUILD_WITH_INSTALL_RPATH INSTALL_RPATH INSTALL_RPATH_USE_LINK_PATH SKIP_BUILD_RPATH LINKER_LANGUAGE LINK_DEPENDS LINK_DEPENDS_NO_SHARED LINK_FLAGS LINK_FLAGS_ LINK_INTERFACE_LIBRARIES LINK_INTERFACE_LIBRARIES_ LINK_INTERFACE_MULTIPLICITY LINK_INTERFACE_MULTIPLICITY_ LINK_SEARCH_START_STATIC LINK_SEARCH_END_STATIC STATIC_LIBRARY_FLAGS STATIC_LIBRARY_FLAGS_ NO_SONAME SOVERSION VERSION) # copy Qt stuff cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} AUTOMOC AUTOMOC_MOC_OPTIONS AUTOUIC AUTOUIC_OPTIONS AUTORCC AUTORCC_OPTIONS AUTOGEN_TARGET_DEPENDS) # copy cmake stuff cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} IMPLICIT_DEPENDS_INCLUDE_TRANSFORM RULE_LAUNCH_COMPILE RULE_LAUNCH_CUSTOM RULE_LAUNCH_LINK) # copy Apple platform specific stuff cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} BUNDLE BUNDLE_EXTENSION FRAMEWORK INSTALL_NAME_DIR MACOSX_BUNDLE MACOSX_BUNDLE_INFO_PLIST MACOSX_FRAMEWORK_INFO_PLIST MACOSX_RPATH OSX_ARCHITECTURES OSX_ARCHITECTURES_ PRIVATE_HEADER PUBLIC_HEADER RESOURCE) # copy Windows platform specific stuff cotire_copy_set_properites("${_configurations}" TARGET ${_target} ${_unityTargetName} GNUtoMS PDB_NAME PDB_NAME_ PDB_OUTPUT_DIRECTORY PDB_OUTPUT_DIRECTORY_ VS_DOTNET_REFERENCES VS_GLOBAL_KEYWORD VS_GLOBAL_PROJECT_TYPES VS_GLOBAL_ROOTNAMESPACE VS_KEYWORD VS_SCC_AUXPATH VS_SCC_LOCALPATH VS_SCC_PROJECTNAME VS_SCC_PROVIDER VS_WINRT_EXTENSIONS VS_WINRT_REFERENCES WIN32_EXECUTABLE) # use output name from original target get_target_property(_targetOutputName ${_unityTargetName} OUTPUT_NAME) if (NOT _targetOutputName) 
set_property(TARGET ${_unityTargetName} PROPERTY OUTPUT_NAME "${_target}") endif() # use export symbol from original target cotire_get_target_export_symbol("${_target}" _defineSymbol) if (_defineSymbol) set_property(TARGET ${_unityTargetName} PROPERTY DEFINE_SYMBOL "${_defineSymbol}") if ("${_targetType}" STREQUAL "EXECUTABLE") set_property(TARGET ${_unityTargetName} PROPERTY ENABLE_EXPORTS TRUE) endif() endif() cotire_init_target(${_unityTargetName}) cotire_add_to_unity_all_target(${_unityTargetName}) set_property(TARGET ${_target} PROPERTY COTIRE_UNITY_TARGET_NAME "${_unityTargetName}") endfunction(cotire_setup_unity_build_target) function (cotire_target _target) set(_options "") set(_oneValueArgs SOURCE_DIR BINARY_DIR) set(_multiValueArgs LANGUAGES CONFIGURATIONS) cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN}) if (NOT _option_SOURCE_DIR) set (_option_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}") endif() if (NOT _option_BINARY_DIR) set (_option_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}") endif() if (NOT _option_LANGUAGES) get_property (_option_LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) endif() if (NOT _option_CONFIGURATIONS) cotire_get_configuration_types(_option_CONFIGURATIONS) endif() # trivial checks get_target_property(_imported ${_target} IMPORTED) if (_imported) message (WARNING "cotire: imported target ${_target} cannot be cotired.") return() endif() # resolve alias get_target_property(_aliasName ${_target} ALIASED_TARGET) if (_aliasName) if (COTIRE_DEBUG) message (STATUS "${_target} is an alias. 
Applying cotire to aliased target ${_aliasName} instead.") endif() set (_target ${_aliasName}) endif() # check if target needs to be cotired for build type # when using configuration types, the test is performed at build time cotire_init_cotire_target_properties(${_target}) if (NOT CMAKE_CONFIGURATION_TYPES) if (CMAKE_BUILD_TYPE) list (FIND _option_CONFIGURATIONS "${CMAKE_BUILD_TYPE}" _index) else() list (FIND _option_CONFIGURATIONS "None" _index) endif() if (_index EQUAL -1) if (COTIRE_DEBUG) message (STATUS "CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} not cotired (${_option_CONFIGURATIONS})") endif() return() endif() endif() # choose languages that apply to the target cotire_choose_target_languages("${_option_SOURCE_DIR}" "${_target}" _targetLanguages _wholeTarget ${_option_LANGUAGES}) if (NOT _targetLanguages) return() endif() set (_cmds "") foreach (_language ${_targetLanguages}) cotire_process_target_language("${_language}" "${_option_CONFIGURATIONS}" "${_option_SOURCE_DIR}" "${_option_BINARY_DIR}" ${_target} ${_wholeTarget} _cmd) if (_cmd) list (APPEND _cmds ${_cmd}) endif() endforeach() get_target_property(_targetAddSCU ${_target} COTIRE_ADD_UNITY_BUILD) if (_targetAddSCU) cotire_setup_unity_build_target("${_targetLanguages}" "${_option_CONFIGURATIONS}" "${_option_SOURCE_DIR}" ${_target}) endif() get_target_property(_targetUsePCH ${_target} COTIRE_ENABLE_PRECOMPILED_HEADER) if (_targetUsePCH) cotire_setup_target_pch_usage("${_targetLanguages}" "${_option_SOURCE_DIR}" ${_target} ${_wholeTarget} ${_cmds}) cotire_setup_pch_target("${_targetLanguages}" "${_option_CONFIGURATIONS}" ${_target}) endif() get_target_property(_targetAddCleanTarget ${_target} COTIRE_ADD_CLEAN) if (_targetAddCleanTarget) cotire_setup_clean_target(${_target}) endif() endfunction(cotire_target) function (cotire_map_libraries _strategy _mappedLibrariesVar) set (_mappedLibraries "") foreach (_library ${ARGN}) if (TARGET "${_library}" AND "${_strategy}" MATCHES "COPY_UNITY") # use target's 
corresponding unity target, if available get_target_property(_libraryUnityTargetName ${_library} COTIRE_UNITY_TARGET_NAME) if (TARGET "${_libraryUnityTargetName}") list (APPEND _mappedLibraries "${_libraryUnityTargetName}") else() list (APPEND _mappedLibraries "${_library}") endif() else() list (APPEND _mappedLibraries "${_library}") endif() endforeach() list (REMOVE_DUPLICATES _mappedLibraries) set (${_mappedLibrariesVar} ${_mappedLibraries} PARENT_SCOPE) endfunction() function (cotire_target_link_libraries _target) get_target_property(_unityTargetName ${_target} COTIRE_UNITY_TARGET_NAME) if (TARGET "${_unityTargetName}") get_target_property(_linkLibrariesStrategy ${_target} COTIRE_UNITY_LINK_LIBRARIES_INIT) if (COTIRE_DEBUG) message (STATUS "unity target ${_unityTargetName} link strategy: ${_linkLibrariesStrategy}") endif() if ("${_linkLibrariesStrategy}" MATCHES "^(COPY|COPY_UNITY)$") if (CMAKE_VERSION VERSION_LESS "2.8.11") message (WARNING "cotire: unity target link strategy ${_linkLibrariesStrategy} requires CMake 2.8.11 or later. 
Defaulting to NONE for ${_target}.") else() set (_unityLinkLibraries "") get_target_property(_linkLibraries ${_target} LINK_LIBRARIES) if (_linkLibraries) list (APPEND _unityLinkLibraries ${_linkLibraries}) endif() get_target_property(_interfaceLinkLibraries ${_target} INTERFACE_LINK_LIBRARIES) if (_interfaceLinkLibraries) list (APPEND _unityLinkLibraries ${_interfaceLinkLibraries}) endif() cotire_map_libraries("${_linkLibrariesStrategy}" _unityLinkLibraries ${_unityLinkLibraries}) if (COTIRE_DEBUG) message (STATUS "unity target ${_unityTargetName} libraries: ${_unityLinkLibraries}") endif() if (_unityLinkLibraries) target_link_libraries(${_unityTargetName} ${_unityLinkLibraries}) endif() endif() endif() endif() endfunction(cotire_target_link_libraries) function (cotire_cleanup _binaryDir _cotireIntermediateDirName _targetName) if (_targetName) file (GLOB_RECURSE _cotireFiles "${_binaryDir}/${_targetName}*.*") else() file (GLOB_RECURSE _cotireFiles "${_binaryDir}/*.*") endif() # filter files in intermediate directory set (_filesToRemove "") foreach (_file ${_cotireFiles}) get_filename_component(_dir "${_file}" PATH) get_filename_component(_dirName "${_dir}" NAME) if ("${_dirName}" STREQUAL "${_cotireIntermediateDirName}") list (APPEND _filesToRemove "${_file}") endif() endforeach() if (_filesToRemove) if (COTIRE_VERBOSE) message (STATUS "removing ${_filesToRemove}") endif() file (REMOVE ${_filesToRemove}) endif() endfunction() function (cotire_init_target _targetName) if (COTIRE_TARGETS_FOLDER) set_target_properties(${_targetName} PROPERTIES FOLDER "${COTIRE_TARGETS_FOLDER}") endif() if (MSVC_IDE) set_target_properties(${_targetName} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD TRUE) endif() endfunction() function (cotire_add_to_pch_all_target _pchTargetName) set (_targetName "${COTIRE_PCH_ALL_TARGET_NAME}") if (NOT TARGET "${_targetName}") add_custom_target("${_targetName}" WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" VERBATIM) cotire_init_target("${_targetName}") endif() 
cotire_setup_clean_all_target() add_dependencies(${_targetName} ${_pchTargetName}) endfunction() function (cotire_add_to_unity_all_target _unityTargetName) set (_targetName "${COTIRE_UNITY_BUILD_ALL_TARGET_NAME}") if (NOT TARGET "${_targetName}") add_custom_target("${_targetName}" WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" VERBATIM) cotire_init_target("${_targetName}") endif() cotire_setup_clean_all_target() add_dependencies(${_targetName} ${_unityTargetName}) endfunction() function (cotire_setup_clean_all_target) set (_targetName "${COTIRE_CLEAN_ALL_TARGET_NAME}") if (NOT TARGET "${_targetName}") cotire_set_cmd_to_prologue(_cmds) list (APPEND _cmds -P "${COTIRE_CMAKE_MODULE_FILE}" "cleanup" "${CMAKE_BINARY_DIR}" "${COTIRE_INTDIR}") add_custom_target(${_targetName} COMMAND ${_cmds} WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" COMMENT "Cleaning up all cotire generated files" VERBATIM) cotire_init_target("${_targetName}") endif() endfunction() function (cotire) set(_options "") set(_oneValueArgs SOURCE_DIR BINARY_DIR) set(_multiValueArgs LANGUAGES CONFIGURATIONS) cmake_parse_arguments(_option "${_options}" "${_oneValueArgs}" "${_multiValueArgs}" ${ARGN}) set (_targets ${_option_UNPARSED_ARGUMENTS}) if (NOT _option_SOURCE_DIR) set (_option_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}") endif() if (NOT _option_BINARY_DIR) set (_option_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}") endif() foreach (_target ${_targets}) if (TARGET ${_target}) cotire_target(${_target} LANGUAGES ${_option_LANGUAGES} CONFIGURATIONS ${_option_CONFIGURATIONS} SOURCE_DIR "${_option_SOURCE_DIR}" BINARY_DIR "${_option_BINARY_DIR}") else() message (WARNING "cotire: ${_target} is not a target.") endif() endforeach() foreach (_target ${_targets}) if (TARGET ${_target}) cotire_target_link_libraries(${_target}) endif() endforeach() endfunction() if (CMAKE_SCRIPT_MODE_FILE) # cotire is being run in script mode # locate -P on command args set (COTIRE_ARGC -1) foreach (_index RANGE ${CMAKE_ARGC}) if (COTIRE_ARGC GREATER 
-1) set (COTIRE_ARGV${COTIRE_ARGC} "${CMAKE_ARGV${_index}}") math (EXPR COTIRE_ARGC "${COTIRE_ARGC} + 1") elseif ("${CMAKE_ARGV${_index}}" STREQUAL "-P") set (COTIRE_ARGC 0) endif() endforeach() # include target script if available if ("${COTIRE_ARGV2}" MATCHES "\\.cmake$") # the included target scripts sets up additional variables relating to the target (e.g., COTIRE_TARGET_SOURCES) include("${COTIRE_ARGV2}") endif() if (COTIRE_DEBUG) message (STATUS "${COTIRE_ARGV0} ${COTIRE_ARGV1} ${COTIRE_ARGV2} ${COTIRE_ARGV3} ${COTIRE_ARGV4} ${COTIRE_ARGV5}") endif() if (WIN32) # for MSVC, compiler IDs may not always be set correctly if (MSVC) set (CMAKE_C_COMPILER_ID "MSVC") set (CMAKE_CXX_COMPILER_ID "MSVC") endif() endif() if (NOT COTIRE_BUILD_TYPE) set (COTIRE_BUILD_TYPE "None") endif() string (TOUPPER "${COTIRE_BUILD_TYPE}" _upperConfig) set (_includeDirs ${COTIRE_TARGET_INCLUDE_DIRECTORIES_${_upperConfig}}) set (_systemIncludeDirs ${COTIRE_TARGET_SYSTEM_INCLUDE_DIRECTORIES_${_upperConfig}}) set (_compileDefinitions ${COTIRE_TARGET_COMPILE_DEFINITIONS_${_upperConfig}}) set (_compileFlags ${COTIRE_TARGET_COMPILE_FLAGS_${_upperConfig}}) # check if target has been cotired for actual build type COTIRE_BUILD_TYPE list (FIND COTIRE_TARGET_CONFIGURATION_TYPES "${COTIRE_BUILD_TYPE}" _index) if (_index GREATER -1) set (_sources ${COTIRE_TARGET_SOURCES}) set (_sourceLocations ${COTIRE_TARGET_SOURCE_LOCATIONS}) set (_sourcesDefinitions ${COTIRE_TARGET_SOURCES_COMPILE_DEFINITIONS_${_upperConfig}}) else() if (COTIRE_DEBUG) message (STATUS "COTIRE_BUILD_TYPE=${COTIRE_BUILD_TYPE} not cotired (${COTIRE_TARGET_CONFIGURATION_TYPES})") endif() set (_sources "") set (_sourceLocations "") set (_sourcesDefinitions "") endif() set (_targetPreUndefs ${COTIRE_TARGET_PRE_UNDEFS}) set (_targetPostUndefs ${COTIRE_TARGET_POST_UNDEFS}) set (_sourcesPreUndefs ${COTIRE_TARGET_SOURCES_PRE_UNDEFS}) set (_sourcesPostUndefs ${COTIRE_TARGET_SOURCES_POST_UNDEFS}) if ("${COTIRE_ARGV1}" STREQUAL "unity") if 
(XCODE) # executing pre-build action under Xcode, check dependency on target script set (_dependsOption DEPENDS "${COTIRE_ARGV2}") else() # executing custom command, no need to re-check for dependencies set (_dependsOption "") endif() cotire_select_unity_source_files("${COTIRE_ARGV3}" _sources ${_sources}) cotire_select_unity_source_files("${COTIRE_ARGV3}" _sourceLocations ${_sourceLocations}) cotire_generate_unity_source( "${COTIRE_ARGV3}" ${_sources} LANGUAGE "${COTIRE_TARGET_LANGUAGE}" SOURCE_LOCATIONS ${_sourceLocations} SOURCES_COMPILE_DEFINITIONS ${_sourcesDefinitions} PRE_UNDEFS ${_targetPreUndefs} POST_UNDEFS ${_targetPostUndefs} SOURCES_PRE_UNDEFS ${_sourcesPreUndefs} SOURCES_POST_UNDEFS ${_sourcesPostUndefs} ${_dependsOption}) elseif ("${COTIRE_ARGV1}" STREQUAL "prefix") if (XCODE) # executing pre-build action under Xcode, check dependency on unity file and prefix dependencies set (_dependsOption DEPENDS "${COTIRE_ARGV4}" ${COTIRE_TARGET_PREFIX_DEPENDS}) else() # executing custom command, no need to re-check for dependencies set (_dependsOption "") endif() set (_files "") foreach (_index RANGE 4 ${COTIRE_ARGC}) if (COTIRE_ARGV${_index}) list (APPEND _files "${COTIRE_ARGV${_index}}") endif() endforeach() cotire_generate_prefix_header( "${COTIRE_ARGV3}" ${_files} COMPILER_EXECUTABLE "${CMAKE_${COTIRE_TARGET_LANGUAGE}_COMPILER}" COMPILER_ARG1 ${CMAKE_${COTIRE_TARGET_LANGUAGE}_COMPILER_ARG1} COMPILER_ID "${CMAKE_${COTIRE_TARGET_LANGUAGE}_COMPILER_ID}" COMPILER_VERSION "${COTIRE_${COTIRE_TARGET_LANGUAGE}_COMPILER_VERSION}" LANGUAGE "${COTIRE_TARGET_LANGUAGE}" IGNORE_PATH "${COTIRE_TARGET_IGNORE_PATH};${COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_PATH}" INCLUDE_PATH ${COTIRE_TARGET_INCLUDE_PATH} IGNORE_EXTENSIONS "${CMAKE_${COTIRE_TARGET_LANGUAGE}_SOURCE_FILE_EXTENSIONS};${COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_EXTENSIONS}" INCLUDE_SYSTEM_FLAG "${COTIRE_INCLUDE_SYSTEM_FLAG}" INCLUDE_DIRECTORIES ${_includeDirs} SYSTEM_INCLUDE_DIRECTORIES ${_systemIncludeDirs} 
COMPILE_DEFINITIONS ${_compileDefinitions} COMPILE_FLAGS ${_compileFlags} ${_dependsOption}) elseif ("${COTIRE_ARGV1}" STREQUAL "precompile") set (_files "") foreach (_index RANGE 5 ${COTIRE_ARGC}) if (COTIRE_ARGV${_index}) list (APPEND _files "${COTIRE_ARGV${_index}}") endif() endforeach() cotire_precompile_prefix_header( "${COTIRE_ARGV3}" "${COTIRE_ARGV4}" "${COTIRE_ARGV5}" COMPILER_EXECUTABLE "${CMAKE_${COTIRE_TARGET_LANGUAGE}_COMPILER}" COMPILER_ARG1 ${CMAKE_${COTIRE_TARGET_LANGUAGE}_COMPILER_ARG1} COMPILER_ID "${CMAKE_${COTIRE_TARGET_LANGUAGE}_COMPILER_ID}" COMPILER_VERSION "${COTIRE_${COTIRE_TARGET_LANGUAGE}_COMPILER_VERSION}" LANGUAGE "${COTIRE_TARGET_LANGUAGE}" INCLUDE_SYSTEM_FLAG "${COTIRE_INCLUDE_SYSTEM_FLAG}" INCLUDE_DIRECTORIES ${_includeDirs} SYSTEM_INCLUDE_DIRECTORIES ${_systemIncludeDirs} COMPILE_DEFINITIONS ${_compileDefinitions} COMPILE_FLAGS ${_compileFlags}) elseif ("${COTIRE_ARGV1}" STREQUAL "combine") if (COTIRE_TARGET_LANGUAGE) set (_startIndex 3) else() set (_startIndex 2) endif() set (_files "") foreach (_index RANGE ${_startIndex} ${COTIRE_ARGC}) if (COTIRE_ARGV${_index}) list (APPEND _files "${COTIRE_ARGV${_index}}") endif() endforeach() if (COTIRE_TARGET_LANGUAGE) cotire_generate_unity_source(${_files} LANGUAGE "${COTIRE_TARGET_LANGUAGE}") else() cotire_generate_unity_source(${_files}) endif() elseif ("${COTIRE_ARGV1}" STREQUAL "cleanup") cotire_cleanup("${COTIRE_ARGV2}" "${COTIRE_ARGV3}" "${COTIRE_ARGV4}") else() message (FATAL_ERROR "cotire: unknown command \"${COTIRE_ARGV1}\".") endif() else() # cotire is being run in include mode # set up all variable and property definitions unset (COTIRE_C_COMPILER_VERSION CACHE) unset (COTIRE_CXX_COMPILER_VERSION CACHE) if (NOT DEFINED COTIRE_DEBUG_INIT) if (DEFINED COTIRE_DEBUG) set (COTIRE_DEBUG_INIT ${COTIRE_DEBUG}) else() set (COTIRE_DEBUG_INIT FALSE) endif() endif() option (COTIRE_DEBUG "Enable cotire debugging output?" 
${COTIRE_DEBUG_INIT}) if (NOT DEFINED COTIRE_VERBOSE_INIT) if (DEFINED COTIRE_VERBOSE) set (COTIRE_VERBOSE_INIT ${COTIRE_VERBOSE}) else() set (COTIRE_VERBOSE_INIT FALSE) endif() endif() option (COTIRE_VERBOSE "Enable cotire verbose output?" ${COTIRE_VERBOSE_INIT}) set (COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_EXTENSIONS "inc;inl;ipp" CACHE STRING "Ignore headers with the listed file extensions from the generated prefix header.") set (COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_PATH "" CACHE STRING "Ignore headers from these directories when generating the prefix header.") set (COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS "m;mm" CACHE STRING "Ignore sources with the listed file extensions from the generated unity source.") set (COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES "3" CACHE STRING "Minimum number of sources in target required to enable use of precompiled header.") if (NOT DEFINED COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT) if (DEFINED COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES) set (COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT ${COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES}) elseif ("${CMAKE_GENERATOR}" MATCHES "JOM|Ninja|Visual Studio") # enable parallelization for generators that run multiple jobs by default set (COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT "-j") else() set (COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT "0") endif() endif() set (COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES "${COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES_INIT}" CACHE STRING "Maximum number of source files to include in a single unity source file.") if (NOT COTIRE_PREFIX_HEADER_FILENAME_SUFFIX) set (COTIRE_PREFIX_HEADER_FILENAME_SUFFIX "_prefix") endif() if (NOT COTIRE_UNITY_SOURCE_FILENAME_SUFFIX) set (COTIRE_UNITY_SOURCE_FILENAME_SUFFIX "_unity") endif() if (NOT COTIRE_INTDIR) set (COTIRE_INTDIR "cotire") endif() if (NOT COTIRE_PCH_ALL_TARGET_NAME) set (COTIRE_PCH_ALL_TARGET_NAME "all_pch") endif() if (NOT COTIRE_UNITY_BUILD_ALL_TARGET_NAME) set (COTIRE_UNITY_BUILD_ALL_TARGET_NAME "all_unity") endif() if 
(NOT COTIRE_CLEAN_ALL_TARGET_NAME) set (COTIRE_CLEAN_ALL_TARGET_NAME "clean_cotire") endif() if (NOT COTIRE_CLEAN_TARGET_SUFFIX) set (COTIRE_CLEAN_TARGET_SUFFIX "_clean_cotire") endif() if (NOT COTIRE_PCH_TARGET_SUFFIX) set (COTIRE_PCH_TARGET_SUFFIX "_pch") endif() if (NOT COTIRE_UNITY_BUILD_TARGET_SUFFIX) set (COTIRE_UNITY_BUILD_TARGET_SUFFIX "_unity") endif() if (NOT DEFINED COTIRE_TARGETS_FOLDER) set (COTIRE_TARGETS_FOLDER "cotire") endif() if (NOT DEFINED COTIRE_UNITY_OUTPUT_DIRECTORY) if ("${CMAKE_GENERATOR}" MATCHES "Ninja") # generated Ninja build files do not work if the unity target produces the same output file as the cotired target set (COTIRE_UNITY_OUTPUT_DIRECTORY "unity") else() set (COTIRE_UNITY_OUTPUT_DIRECTORY "") endif() endif() # define cotire cache variables define_property( CACHED_VARIABLE PROPERTY "COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_PATH" BRIEF_DOCS "Ignore headers from these directories when generating the prefix header." FULL_DOCS "The variable can be set to a semicolon separated list of include directories." "If a header file is found in one of these directories or sub-directories, it will be excluded from the generated prefix header." "If not defined, defaults to empty list." ) define_property( CACHED_VARIABLE PROPERTY "COTIRE_ADDITIONAL_PREFIX_HEADER_IGNORE_EXTENSIONS" BRIEF_DOCS "Ignore includes with the listed file extensions from the generated prefix header." FULL_DOCS "The variable can be set to a semicolon separated list of file extensions." "If a header file extension matches one in the list, it will be excluded from the generated prefix header." "Includes with an extension in CMAKE__SOURCE_FILE_EXTENSIONS are always ignored." "If not defined, defaults to inc;inl;ipp." ) define_property( CACHED_VARIABLE PROPERTY "COTIRE_UNITY_SOURCE_EXCLUDE_EXTENSIONS" BRIEF_DOCS "Exclude sources with the listed file extensions from the generated unity source." FULL_DOCS "The variable can be set to a semicolon separated list of file extensions." 
"If a source file extension matches one in the list, it will be excluded from the generated unity source file." "Source files with an extension in CMAKE__IGNORE_EXTENSIONS are always excluded." "If not defined, defaults to m;mm." ) define_property( CACHED_VARIABLE PROPERTY "COTIRE_MINIMUM_NUMBER_OF_TARGET_SOURCES" BRIEF_DOCS "Minimum number of sources in target required to enable use of precompiled header." FULL_DOCS "The variable can be set to an integer > 0." "If a target contains less than that number of source files, cotire will not enable the use of the precompiled header for the target." "If not defined, defaults to 3." ) define_property( CACHED_VARIABLE PROPERTY "COTIRE_MAXIMUM_NUMBER_OF_UNITY_INCLUDES" BRIEF_DOCS "Maximum number of source files to include in a single unity source file." FULL_DOCS "This may be set to an integer >= 0." "If 0, cotire will only create a single unity source file." "If a target contains more than that number of source files, cotire will create multiple unity source files for it." "Can be set to \"-j\" to optimize the count of unity source files for the number of available processor cores." "Can be set to \"-j jobs\" to optimize the number of unity source files for the given number of simultaneous jobs." "Is used to initialize the target property COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES." "Defaults to \"-j\" for the generators Visual Studio, JOM or Ninja. Defaults to 0 otherwise." ) # define cotire directory properties define_property( DIRECTORY PROPERTY "COTIRE_ENABLE_PRECOMPILED_HEADER" BRIEF_DOCS "Modify build command of cotired targets added in this directory to make use of the generated precompiled header." FULL_DOCS "See target property COTIRE_ENABLE_PRECOMPILED_HEADER." ) define_property( DIRECTORY PROPERTY "COTIRE_ADD_UNITY_BUILD" BRIEF_DOCS "Add a new target that performs a unity build for cotired targets added in this directory." FULL_DOCS "See target property COTIRE_ADD_UNITY_BUILD." 
) define_property( DIRECTORY PROPERTY "COTIRE_ADD_CLEAN" BRIEF_DOCS "Add a new target that cleans all cotire generated files for cotired targets added in this directory." FULL_DOCS "See target property COTIRE_ADD_CLEAN." ) define_property( DIRECTORY PROPERTY "COTIRE_PREFIX_HEADER_IGNORE_PATH" BRIEF_DOCS "Ignore headers from these directories when generating the prefix header." FULL_DOCS "See target property COTIRE_PREFIX_HEADER_IGNORE_PATH." ) define_property( DIRECTORY PROPERTY "COTIRE_PREFIX_HEADER_INCLUDE_PATH" BRIEF_DOCS "Honor headers from these directories when generating the prefix header." FULL_DOCS "See target property COTIRE_PREFIX_HEADER_INCLUDE_PATH." ) define_property( DIRECTORY PROPERTY "COTIRE_UNITY_SOURCE_PRE_UNDEFS" BRIEF_DOCS "Preprocessor undefs to place in the generated unity source file before the inclusion of each source file." FULL_DOCS "See target property COTIRE_UNITY_SOURCE_PRE_UNDEFS." ) define_property( DIRECTORY PROPERTY "COTIRE_UNITY_SOURCE_POST_UNDEFS" BRIEF_DOCS "Preprocessor undefs to place in the generated unity source file after the inclusion of each source file." FULL_DOCS "See target property COTIRE_UNITY_SOURCE_POST_UNDEFS." ) define_property( DIRECTORY PROPERTY "COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES" BRIEF_DOCS "Maximum number of source files to include in a single unity source file." FULL_DOCS "See target property COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES." ) define_property( DIRECTORY PROPERTY "COTIRE_UNITY_LINK_LIBRARIES_INIT" BRIEF_DOCS "Define strategy for setting up the unity target's link libraries." FULL_DOCS "See target property COTIRE_UNITY_LINK_LIBRARIES_INIT." ) # define cotire target properties define_property( TARGET PROPERTY "COTIRE_ENABLE_PRECOMPILED_HEADER" INHERITED BRIEF_DOCS "Modify this target's build command to make use of the generated precompiled header." FULL_DOCS "If this property is set to TRUE, cotire will modify the build command to make use of the generated precompiled header." 
"Irrespective of the value of this property, cotire will setup custom commands to generate the unity source and prefix header for the target." "For makefile based generators cotire will also set up a custom target to manually invoke the generation of the precompiled header." "The target name will be set to this target's name with the suffix _pch appended." "Inherited from directory." "Defaults to TRUE." ) define_property( TARGET PROPERTY "COTIRE_ADD_UNITY_BUILD" INHERITED BRIEF_DOCS "Add a new target that performs a unity build for this target." FULL_DOCS "If this property is set to TRUE, cotire creates a new target of the same type that uses the generated unity source file instead of the target sources." "Most of the relevant target properties will be copied from this target to the new unity build target." "Target dependencies and linked libraries have to be manually set up for the new unity build target." "The unity target name will be set to this target's name with the suffix _unity appended." "Inherited from directory." "Defaults to TRUE." ) define_property( TARGET PROPERTY "COTIRE_ADD_CLEAN" INHERITED BRIEF_DOCS "Add a new target that cleans all cotire generated files for this target." FULL_DOCS "If this property is set to TRUE, cotire creates a new target that clean all files (unity source, prefix header, precompiled header)." "The clean target name will be set to this target's name with the suffix _clean_cotire appended." "Inherited from directory." "Defaults to FALSE." ) define_property( TARGET PROPERTY "COTIRE_PREFIX_HEADER_IGNORE_PATH" INHERITED BRIEF_DOCS "Ignore headers from these directories when generating the prefix header." FULL_DOCS "The property can be set to a list of directories." "If a header file is found in one of these directories or sub-directories, it will be excluded from the generated prefix header." "Inherited from directory." "If not set, this property is initialized to \${CMAKE_SOURCE_DIR};\${CMAKE_BINARY_DIR}." 
) define_property( TARGET PROPERTY "COTIRE_PREFIX_HEADER_INCLUDE_PATH" INHERITED BRIEF_DOCS "Honor headers from these directories when generating the prefix header." FULL_DOCS "The property can be set to a list of directories." "If a header file is found in one of these directories or sub-directories, it will be included in the generated prefix header." "If a header file is both selected by COTIRE_PREFIX_HEADER_IGNORE_PATH and COTIRE_PREFIX_HEADER_INCLUDE_PATH," "the option which yields the closer relative path match wins." "Inherited from directory." "If not set, this property is initialized to the empty list." ) define_property( TARGET PROPERTY "COTIRE_UNITY_SOURCE_PRE_UNDEFS" INHERITED BRIEF_DOCS "Preprocessor undefs to place in the generated unity source file before the inclusion of each target source file." FULL_DOCS "This may be set to a semicolon-separated list of preprocessor symbols." "cotire will add corresponding #undef directives to the generated unit source file before each target source file." "Inherited from directory." "Defaults to empty string." ) define_property( TARGET PROPERTY "COTIRE_UNITY_SOURCE_POST_UNDEFS" INHERITED BRIEF_DOCS "Preprocessor undefs to place in the generated unity source file after the inclusion of each target source file." FULL_DOCS "This may be set to a semicolon-separated list of preprocessor symbols." "cotire will add corresponding #undef directives to the generated unit source file after each target source file." "Inherited from directory." "Defaults to empty string." ) define_property( TARGET PROPERTY "COTIRE_UNITY_SOURCE_MAXIMUM_NUMBER_OF_INCLUDES" INHERITED BRIEF_DOCS "Maximum number of source files to include in a single unity source file." FULL_DOCS "This may be set to an integer > 0." "If a target contains more than that number of source files, cotire will create multiple unity build files for it." "If not set, cotire will only create a single unity source file." "Inherited from directory." "Defaults to empty." 
) define_property( TARGET PROPERTY "COTIRE__UNITY_SOURCE_INIT" BRIEF_DOCS "User provided unity source file to be used instead of the automatically generated one." FULL_DOCS "If set, cotire will only add the given file(s) to the generated unity source file." "If not set, cotire will add all the target source files to the generated unity source file." "The property can be set to a user provided unity source file." "Defaults to empty." ) define_property( TARGET PROPERTY "COTIRE__PREFIX_HEADER_INIT" BRIEF_DOCS "User provided prefix header file to be used instead of the automatically generated one." FULL_DOCS "If set, cotire will add the given header file(s) to the generated prefix header file." "If not set, cotire will generate a prefix header by tracking the header files included by the unity source file." "The property can be set to a user provided prefix header file (e.g., stdafx.h)." "Defaults to empty." ) define_property( TARGET PROPERTY "COTIRE_UNITY_LINK_LIBRARIES_INIT" INHERITED BRIEF_DOCS "Define strategy for setting up unity target's link libraries." FULL_DOCS "If this property is empty, the generated unity target's link libraries have to be set up manually." "If this property is set to COPY, the unity target's link libraries will be copied from this target." "If this property is set to COPY_UNITY, the unity target's link libraries will be copied from this target with considering existing unity targets." "Inherited from directory." "Defaults to empty." ) define_property( TARGET PROPERTY "COTIRE__UNITY_SOURCE" BRIEF_DOCS "Read-only property. The generated unity source file(s)." FULL_DOCS "cotire sets this property to the path of the generated single computation unit source file for the target." "Defaults to empty string." ) define_property( TARGET PROPERTY "COTIRE__PREFIX_HEADER" BRIEF_DOCS "Read-only property. The generated prefix header file." FULL_DOCS "cotire sets this property to the full path of the generated language prefix header for the target." 
"Defaults to empty string." ) define_property( TARGET PROPERTY "COTIRE__PRECOMPILED_HEADER" BRIEF_DOCS "Read-only property. The generated precompiled header file." FULL_DOCS "cotire sets this property to the full path of the generated language precompiled header binary for the target." "Defaults to empty string." ) define_property( TARGET PROPERTY "COTIRE_UNITY_TARGET_NAME" BRIEF_DOCS "The name of the generated unity build target corresponding to this target." FULL_DOCS "This property can be set to the desired name of the unity target that will be created by cotire." "If not set, the unity target name will be set to this target's name with the suffix _unity appended." "After this target has been processed by cotire, the property is set to the actual name of the generated unity target." "Defaults to empty string." ) # define cotire source properties define_property( SOURCE PROPERTY "COTIRE_EXCLUDED" BRIEF_DOCS "Do not modify source file's build command." FULL_DOCS "If this property is set to TRUE, the source file's build command will not be modified to make use of the precompiled header." "The source file will also be excluded from the generated unity source file." "Source files that have their COMPILE_FLAGS property set will be excluded by default." "Defaults to FALSE." ) define_property( SOURCE PROPERTY "COTIRE_DEPENDENCY" BRIEF_DOCS "Add this source file to dependencies of the automatically generated prefix header file." FULL_DOCS "If this property is set to TRUE, the source file is added to dependencies of the generated prefix header file." "If the file is modified, cotire will re-generate the prefix header source upon build." "Defaults to FALSE." ) define_property( SOURCE PROPERTY "COTIRE_UNITY_SOURCE_PRE_UNDEFS" BRIEF_DOCS "Preprocessor undefs to place in the generated unity source file before the inclusion of this source file." FULL_DOCS "This may be set to a semicolon-separated list of preprocessor symbols." 
"cotire will add corresponding #undef directives to the generated unit source file before this file is included." "Defaults to empty string." ) define_property( SOURCE PROPERTY "COTIRE_UNITY_SOURCE_POST_UNDEFS" BRIEF_DOCS "Preprocessor undefs to place in the generated unity source file after the inclusion of this source file." FULL_DOCS "This may be set to a semicolon-separated list of preprocessor symbols." "cotire will add corresponding #undef directives to the generated unit source file after this file is included." "Defaults to empty string." ) define_property( SOURCE PROPERTY "COTIRE_START_NEW_UNITY_SOURCE" BRIEF_DOCS "Start a new unity source file which includes this source file as the first one." FULL_DOCS "If this property is set to TRUE, cotire will complete the current unity file and start a new one." "The new unity source file will include this source file as the first one." "This property essentially works as a separator for unity source files." "Defaults to FALSE." ) define_property( SOURCE PROPERTY "COTIRE_TARGET" BRIEF_DOCS "Read-only property. Mark this source file as cotired for the given target." FULL_DOCS "cotire sets this property to the name of target, that the source file's build command has been altered for." "Defaults to empty string." 
)
message (STATUS "cotire ${COTIRE_CMAKE_MODULE_VERSION} loaded.")
endif()


================================================
FILE: cmake/packaging.cmake
================================================
# Logic to build packages (RPM/DEB) using CPack; see https://cmake.org/Wiki/CMake:Packaging_With_CPack
#
# Strategy: inspect /etc/redhat-release, /etc/os-release and /etc/issue to
# detect the distribution, then set the CPack generator (RPM or DEB), the
# CPACK_SYSTEM_NAME used in the package file name, and the distribution's
# run-time dependency list.

set(LINUX_NAME "")
set(IS_DEBIAN_PACKAGE FALSE)
if(EXISTS "/etc/redhat-release")
   file(READ "/etc/redhat-release" LINUX_ISSUE)
   if(LINUX_ISSUE MATCHES "CentOS")
      set(CPACK_GENERATOR "RPM")
      set(CPACK_SYSTEM_NAME "centos")
      set(CPACK_RPM_PACKAGE_REQUIRES "centos-release-scl, epel-release, rh-python36, gmp, libuuid, sqlite, gtkmm30, boost-system, boost-filesystem, boost-program-options, boost-regex, libstdc++")
      message("-- This is a CentOS system")
   endif()
   if(LINUX_ISSUE MATCHES "Scientific Linux")
      set(CPACK_GENERATOR "RPM")
      set(CPACK_SYSTEM_NAME "scientific7x")
      set(CPACK_RPM_PACKAGE_REQUIRES "yum-conf-softwarecollections, epel-release, rh-python36, gmp, libuuid, sqlite, gtkmm30, boost-system, boost-filesystem, boost-program-options, boost-regex, libstdc++")
      # Fixed: this branch used to print the copy-pasted "CentOS" message.
      message("-- This is a Scientific Linux system")
   endif()
   if(LINUX_ISSUE MATCHES "Fedora")
      set(CPACK_GENERATOR "RPM")
      if(LINUX_ISSUE MATCHES "40")
         message("-- This is a Fedora 40 system")
         set(CPACK_RPM_PACKAGE_REQUIRES "python3, python3-libs, gmp, libuuid, sqlite, gtkmm30, openssl, boost-system, boost-filesystem, boost-program-options, boost-regex, libstdc++, python3-matplotlib, python3-sympy")
         set(CPACK_SYSTEM_NAME "fedora40-${STANDARD_ARCH_NAME}")
      elseif(LINUX_ISSUE MATCHES "41")
         message("-- This is a Fedora 41 system")
         set(CPACK_RPM_PACKAGE_REQUIRES "python3, python3-libs, gmp, libuuid, sqlite, gtkmm30, openssl, boost-system, boost-filesystem, boost-program-options, boost-regex, libstdc++, python3-matplotlib, python3-sympy")
         set(CPACK_SYSTEM_NAME "fedora41-${STANDARD_ARCH_NAME}")
      elseif(LINUX_ISSUE MATCHES "42")
         message("-- This is a Fedora 42 system")
         set(CPACK_RPM_PACKAGE_REQUIRES "python3, python3-libs, gmp, libuuid, sqlite, gtkmm30, openssl, boost-system, boost-filesystem, boost-program-options, boost-regex, libstdc++, python3-matplotlib, python3-sympy")
         set(CPACK_SYSTEM_NAME "fedora42-${STANDARD_ARCH_NAME}")
      else()
         message(FATAL_ERROR "-- This is an old Fedora system <40, we do not support packaging for this anymore.")
      endif()
   endif()
else()
   if(EXISTS "/etc/os-release")
      file(READ "/etc/os-release" LINUX_ISSUE)
      # NOTE(review): "15.0" is a regex here; the dot matches any character.
      if(LINUX_ISSUE MATCHES "15.0")
         set(CPACK_SYSTEM_NAME "leap150")
         set(CPACK_GENERATOR "RPM")
         message("-- This is an openSUSE Leap 15.0 system (UNSUPPORTED)")
         set(CPACK_RPM_PACKAGE_REQUIRES "libpython3_6m1_0, libgmp10, libuuid1, libsqlite3-0, libgtkmm-3_0-1, libboost_system1_66_0, libboost_filesystem1_66_0, libboost_program_options1_66_0, libboost_regex1_66_0, libstdc++6, python3-matplotlib, python3-sympy")
      endif()
      if(LINUX_ISSUE MATCHES "Tumbleweed")
         set(CPACK_SYSTEM_NAME "tumbleweed")
         set(CPACK_GENERATOR "RPM")
         message("-- This is an openSUSE Tumbleweed system")
         set(CPACK_RPM_PACKAGE_REQUIRES "libpython3_13-1_0, libgmp10, libuuid1, libgtkmm-3_0-1, openssl, libboost_system1_88_0, libboost_filesystem1_88_0, libboost_program_options1_88_0, libboost_regex1_88_0, libstdc++6, python313-matplotlib, python313-sympy")
      endif()
   endif()
   if(EXISTS "/etc/issue")
      file(READ "/etc/issue" LINUX_ISSUE)
      if(LINUX_ISSUE MATCHES "openSUSE")
         if(LINUX_ISSUE MATCHES "42.1")
            set(CPACK_SYSTEM_NAME "leap421")
            set(CPACK_GENERATOR "RPM")
            message("-- This is an openSUSE Leap 42.1 system (UNSUPPORTED)")
            set(CPACK_RPM_PACKAGE_REQUIRES "libpython3_4m1_0, libgmp10, libuuid1, libsqlite3-0, libgtkmm-3_0-1, libboost_system1_61_0, libboost_filesystem1_61_0, libboost_program_options1_61_0, libboost_regex1_61_0, libstdc++6, python3-matplotlib")
         endif()
      endif()
      if(LINUX_ISSUE MATCHES "Debian")
         set(CPACK_GENERATOR "DEB")
         set(IS_DEBIAN_PACKAGE TRUE)
         if(PACKAGING_MODE)
            set(PYTHON_SITE_PATH "/usr/lib/python3/dist-packages")
            set(INSTALL_LATEX_DIR "/usr/share/texmf")
         endif()
         if(LINUX_ISSUE MATCHES "8")
            set(CPACK_SYSTEM_NAME "jessie")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libboost-system1.55.0, libboost-filesystem1.55.0, libboost-program-options1.55.0, libboost-regex1.55.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
            message("-- This is a Debian Jessie 8.x system")
         endif()
         if(LINUX_ISSUE MATCHES "9")
            set(CPACK_SYSTEM_NAME "stretch")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libboost-system1.62.0, libboost-filesystem1.62.0, libboost-program-options1.62.0, libboost-regex1.62.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
            message("-- This is a Debian Stretch 9.x system")
         endif()
         # Buster may identify itself either by codename or by "10"; the two
         # branches below are intentionally identical.
         if(LINUX_ISSUE MATCHES "buster")
            set(CPACK_SYSTEM_NAME "buster")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libboost-system1.67.0, libboost-filesystem1.67.0, libboost-program-options1.67.0, libboost-regex1.67.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
            message("-- This is a Debian Buster 10.x system")
         endif()
         if(LINUX_ISSUE MATCHES "10")
            set(CPACK_SYSTEM_NAME "buster")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libboost-system1.67.0, libboost-filesystem1.67.0, libboost-program-options1.67.0, libboost-regex1.67.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
            message("-- This is a Debian Buster 10.x system")
         endif()
      endif()
      if(LINUX_ISSUE MATCHES "Ubuntu")
         set(CPACK_GENERATOR "DEB")
         set(IS_DEBIAN_PACKAGE TRUE)
         if(PACKAGING_MODE)
            set(PYTHON_SITE_PATH "/usr/lib/python3/dist-packages")
            set(INSTALL_LATEX_DIR "/usr/share/texmf")
         endif()
         if(LINUX_ISSUE MATCHES "20.04")
            set(CPACK_SYSTEM_NAME "focal-${STANDARD_ARCH_NAME}")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libssl3, libboost-system1.71.0, libboost-filesystem1.71.0, libboost-program-options1.71.0, libboost-regex1.71.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
            message("-- This is an Ubuntu 20.04 system")
         endif()
         if(LINUX_ISSUE MATCHES "22.04")
            set(CPACK_SYSTEM_NAME "ubuntu-22.04-jammy-${STANDARD_ARCH_NAME}")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libtbb12, libgmpxx4ldbl, libssl3, libboost-system1.74.0, libboost-filesystem1.74.0, libboost-program-options1.74.0, libboost-regex1.74.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
            message("-- This is an Ubuntu 22.04 system")
         endif()
         if(LINUX_ISSUE MATCHES "24.04")
            set(CPACK_SYSTEM_NAME "ubuntu-24.04-noble-${STANDARD_ARCH_NAME}")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libtbb12, libgmpxx4ldbl, libssl3, libboost-system1.83.0, libboost-filesystem1.83.0, libboost-program-options1.83.0, libboost-regex1.83.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1t64, librsvg2-2, librsvg2-common, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
            message("-- This is an Ubuntu 24.04 system")
         endif()
      endif()
      if(LINUX_ISSUE MATCHES "Mint")
         set(CPACK_GENERATOR "DEB")
         set(IS_DEBIAN_PACKAGE TRUE)
         if(PACKAGING_MODE)
            set(PYTHON_SITE_PATH "/usr/lib/python3/dist-packages")
            set(INSTALL_LATEX_DIR "/usr/share/texmf")
         endif()
         if(LINUX_ISSUE MATCHES "19")
            message("-- This is a Linux Mint 19 system")
            set(CPACK_SYSTEM_NAME "tessa")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libboost-system1.65.1, libboost-filesystem1.65.1, libboost-program-options1.65.1, libboost-regex1.65.1, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
         endif()
         if(LINUX_ISSUE MATCHES "20")
            message("-- This is a Linux Mint 20 system")
            set(CPACK_SYSTEM_NAME "ulyana")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libboost-system1.71.0, libboost-filesystem1.71.0, libboost-program-options1.71.0, libboost-regex1.71.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
         endif()
         if(LINUX_ISSUE MATCHES "21")
            message("-- This is a Linux Mint 21 system")
            set(CPACK_SYSTEM_NAME "vanessa")
            set(CPACK_DEBIAN_PACKAGE_DEPENDS "python3, libgmpxx4ldbl, libboost-system1.74.0, libboost-filesystem1.74.0, libboost-program-options1.74.0, libboost-regex1.74.0, libstdc++6, uuid-runtime, libgtkmm-3.0-1v5, python3-matplotlib, python3-mpmath, python3-sympy, python3-gmpy2")
         endif()
      endif()
   endif()
endif()

if(WIN32)
   message("-- This is a Windows system, creating WIX installer")
   # A good resource (though not complete) for CPack on various platforms is
   # https://martin-fieber.de/blog/cmake-cpack-cross-platform-distributables/

   # set(CPACK_GENERATOR NSIS)
   # set(CPACK_PACKAGE_INSTALL_DIRECTORY "Cadabra")
   # set(CPACK_NSIS_MODIFY_PATH OFF)
   # set(CPACK_NSIS_EXECUTABLES_DIRECTORY .)
   # set(CPACK_NSIS_URL_INFO_ABOUT "https://cadabra.science/")
   # set(CPACK_NSIS_CONTACT "Kasper Peeters ")
   set(CPACK_GENERATOR WIX)
   set(CPACK_WIX_ROOT "C:/WiX")
   set(CPACK_WIX_VERSION 4)
   set(CPACK_WIX_ARCHITECTURE "${WIX_SHORT_ARCH}")
   set(CPACK_WIX_LIGHT_EXTRA_FLAGS "-sw1076")
   set(CPACK_WIX_DESKTOP_SHORTCUTS TRUE)
   set(CPACK_WIX_PROGRAM_MENU_FOLDER "Cadabra")
   set(CPACK_PACKAGE_INSTALL_DIRECTORY "Cadabra")
   set(CPACK_WIX_PROPERTY_ARPURLINFOABOUT "https://cadabra.science/")
   set(CPACK_WIX_PROPERTY_ARPCONTACT "Kasper Peeters ")
   set(CPACK_WIX_UPGRADE_GUID "7CD938BA-C9E0-4CF0-8649-B44F292C01B5")
   set(CPACK_WIX_UI_BANNER "${CMAKE_SOURCE_DIR}/doc/msi_banner.png")
   set(CPACK_WIX_UI_DIALOG "${CMAKE_SOURCE_DIR}/doc/msi_dialog.png")
   set(CPACK_WIX_PRODUCT_ICON "${CMAKE_SOURCE_DIR}/config/cadabra2.ico")
   set(CPACK_PACKAGE_EXECUTABLES "cadabra2-gtk" "Cadabra")
   set(CPACK_CREATE_DESKTOP_LINKS "cadabra2-gtk")
   # set(CPACK_WIX_EXTRA_SOURCES "${CMAKE_SOURCE_DIR}/config/shortcuts.wxs")
   set(CPACK_START_MENU_SHORTCUTS "Cadabra")
endif()

# Ensure that on Windows we also install the libraries provided
# by Visual Studio, e.g. MSVCnnn.DLL. This does mean that the installer
# will now contain both the normal and the debug libraries, but better
# to have both than to have none.
#   set(CMAKE_INSTALL_DEBUG_LIBRARIES TRUE)
# That didn't work... Commented out for future reference.
include (InstallRequiredSystemLibraries)

if(NOT WIN32)
   set(CPACK_SET_DESTDIR true)
endif()
set(CPACK_INSTALL_PREFIX /usr)
set(CPACK_PACKAGE_NAME "cadabra2")
set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/doc/license.txt")
set(CPACK_RPM_PACKAGE_LICENSE "GPLv3")
set(CPACK_PACKAGE_VERSION_MAJOR "${CADABRA_VERSION_MAJOR}")
set(CPACK_PACKAGE_VERSION_MINOR "${CADABRA_VERSION_MINOR}")
set(CPACK_PACKAGE_VERSION_PATCH "${CADABRA_VERSION_PATCH}")
set(CPACK_PACKAGE_VERSION "${CADABRA_VERSION_SEM}")
set(CPACK_PACKAGE_VENDOR "Kasper Peeters")
set(CPACK_PACKAGE_CONTACT "Kasper Peeters ")
set(CPACK_STRIP_FILES ON)
set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA ${CMAKE_CURRENT_BINARY_DIR}/postinst)
set(CPACK_RPM_POST_INSTALL_SCRIPT_FILE ${CMAKE_CURRENT_BINARY_DIR}/postinst)
unset(CPACK_RPM_PACKAGE_RELOCATABLE)
#xdg-desktop-menu install /share/applications/MyApp.desktop
set(CPACK_DEBIAN_PACKAGE_SECTION "math")
set(CPACK_RPM_PACKAGE_GROUP "Applications/Productivity")
set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "field-theory motivated computer algebra system")
set(CPACK_PACKAGE_DESCRIPTION_FILE ${CMAKE_CURRENT_SOURCE_DIR}/doc/description)
include(CPack)


================================================
FILE: cmake/policies.cmake
================================================
# Policy settings for CMake to resolve ambiguities.
if (POLICY CMP0042)
   cmake_policy(SET CMP0042 NEW)
endif()
if (POLICY CMP0054)
   cmake_policy(SET CMP0054 NEW)
endif()
if (POLICY CMP0127)
   cmake_policy(SET CMP0127 NEW)
endif()


================================================
FILE: cmake/version.cmake
================================================
# Version information for Cadabra. The semantic version is assembled from
# major/minor/patch (plus an optional release-candidate number), and the
# build identifier is derived from the git history when git is available.

set(CADABRA_VERSION_MAJOR 2)
set(CADABRA_VERSION_MINOR 5)
set(CADABRA_VERSION_PATCH 15)
set(CADABRA_VERSION_RC "") # do *not* use rcX here, just use X
set(CADABRA_VERSION_SEM ${CADABRA_VERSION_MAJOR}.${CADABRA_VERSION_MINOR}.${CADABRA_VERSION_PATCH})
set(CADABRA_VERSION_GITHUB_TAG ${CADABRA_VERSION_SEM})
if(NOT "${CADABRA_VERSION_RC}" STREQUAL "")
   set(CADABRA_VERSION_GITHUB_TAG ${CADABRA_VERSION_SEM}-rc${CADABRA_VERSION_RC})
   if(WIN32)
      # WiX does not like x.y.z-rcX versions, it wants x.y.z.X
      set(CADABRA_VERSION_SEM ${CADABRA_VERSION_SEM}.${CADABRA_VERSION_RC})
   else()
      # The rest of the world is normal.
      set(CADABRA_VERSION_SEM ${CADABRA_VERSION_SEM}-rc${CADABRA_VERSION_RC})
   endif()
endif()
set(COPYRIGHT_YEARS "2001-2025")
math(EXPR SYSTEM_BITS "${CMAKE_SIZEOF_VOID_P} * 8")

find_program(GIT git PATHS ${GIT_DIR})
if(GIT)
   message("-- Git found: ${GIT}")
   # Fixed: use the executable located by find_program above instead of the
   # bare literal "git"; git may only be reachable via the GIT_DIR hint.
   execute_process(COMMAND "${GIT}" rev-parse --short HEAD
      OUTPUT_VARIABLE GIT_SHORT_SHA
      OUTPUT_STRIP_TRAILING_WHITESPACE)
   execute_process(COMMAND "${GIT}" rev-list --count HEAD
      OUTPUT_VARIABLE GIT_COMMIT_SERIAL
      OUTPUT_STRIP_TRAILING_WHITESPACE)
   execute_process(COMMAND "${GIT}" log -1 --date=short --pretty=format:%cd
      OUTPUT_VARIABLE GIT_COMMIT_DATE
      OUTPUT_STRIP_TRAILING_WHITESPACE)
else()
   message("-- Git not found, not including commit SHA")
endif()
if(GIT_SHORT_SHA)
   set(CADABRA_VERSION_BUILD "${GIT_COMMIT_SERIAL}.${GIT_SHORT_SHA}")
else()
   # Source tree without git metadata (e.g. a tarball build).
   set(CADABRA_VERSION_BUILD "private")
endif()
if(GIT_COMMIT_DATE)
   set(CADABRA_VERSION_DATE "${GIT_COMMIT_DATE}")
else()
   string(TIMESTAMP THE_DATE "%Y-%m-%d" UTC)
   set(CADABRA_VERSION_DATE "${THE_DATE}")
endif()


================================================
FILE: cmake/windows.cmake
================================================
# Collection of utilities for using
# vcpkg to find libraries in CMake

list(LENGTH CMAKE_CONFIGURATION_TYPES N_CONFIGURATION_TYPES)
if (${N_CONFIGURATION_TYPES} EQUAL 1)
   set(CMAKE_BUILD_TYPE ${CMAKE_CONFIGURATION_TYPES})
endif()

# Attempts to find the vcpkg.cmake toolchain file and include it
# if it has not been supplied.
if (NOT VCPKG_TOOLCHAIN)
   find_file(CMAKE_TOOLCHAIN_FILE vcpkg.cmake
      HINTS $ENV{userprofile} $ENV{systemdrive}
      PATH_SUFFIXES vcpkg/scripts/buildsystems)
   if (CMAKE_TOOLCHAIN_FILE)
      include(${CMAKE_TOOLCHAIN_FILE})
   endif()
endif()
if (VCPKG_TOOLCHAIN)
   if(NOT _VCPKG_ROOT_DIR)
      set(_VCPKG_ROOT_DIR ${Z_VCPKG_ROOT_DIR})
   endif()
   message(STATUS "Found vcpkg at ${Z_VCPKG_ROOT_DIR}")
else()
   message(FATAL_ERROR "Could not find vcpkg (required for building on Visual Studio)")
endif()

if (VCPKG_TOOLCHAIN)
   # Location of include files
   set(VCPKG_INCLUDE_DIRS ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/include)
   # For Debug (or unspecified) builds, search the vcpkg debug trees first.
   if(CMAKE_BUILD_TYPE MATCHES "^Debug$" OR NOT DEFINED CMAKE_BUILD_TYPE)
      set(VCPKG_LIB_DIRS
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/debug/lib
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/debug/lib/manual-link
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/lib
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/lib/manual-link
      )
      set(VCPKG_BIN_DIRS
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/debug/bin
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/bin
      )
   else()
      set(VCPKG_LIB_DIRS
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/lib
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/lib/manual-link
      )
      set(VCPKG_BIN_DIRS
         ${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}/bin
      )
   endif()
   message(STATUS "VCPKG_LIB_DIRS = ${VCPKG_LIB_DIRS}")
   message(STATUS "VCPKG_BIN_DIRS = ${VCPKG_BIN_DIRS}")

   # windows_find_file(VAR FNAME FEXT)
   #
   # Search VCPKG_LIB_DIRS for a file named FNAME with extension FEXT,
   # trying a series of glob patterns that cover vcpkg's naming variants
   # (debug markers "-d"/"d" and embedded version numbers). On success VAR
   # is set in the caller's scope to the alphabetically-last match (taken
   # to be the latest version); otherwise VAR is set to "<VAR>-NOTFOUND".
   function(windows_find_file VAR FNAME FEXT)
      set(TMPVAR "")
      foreach (DIR ${VCPKG_LIB_DIRS})
         # If a debug build is specified, first try and find the file name
         # with a debug marker
         if(CMAKE_BUILD_TYPE MATCHES "^Debug$" OR NOT DEFINED CMAKE_BUILD_TYPE)
            if ("${TMPVAR}" STREQUAL "") # name-d.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}-d.${FEXT}")
            endif()
            if ("${TMPVAR}" STREQUAL "") # named.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}d.${FEXT}")
            endif()
            # (a literal duplicate of the following pattern was removed here)
            if ("${TMPVAR}" STREQUAL "") # name-<version>-d.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}-[0-9]*-d.${FEXT}")
            endif()
            if ("${TMPVAR}" STREQUAL "") # name<version>-d.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}[0-9]*-d.${FEXT}")
            endif()
            if ("${TMPVAR}" STREQUAL "") # name<version>d.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}[0-9]*d.${FEXT}")
            endif()
            if ("${TMPVAR}" STREQUAL "") # name-d-<version>.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}-d-[0-9]*.${FEXT}")
            endif()
            if ("${TMPVAR}" STREQUAL "") # named-<version>.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}d-[0-9]*.${FEXT}")
            endif()
            if ("${TMPVAR}" STREQUAL "") # name-d<version>.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}-d[0-9]*.${FEXT}")
            endif()
            if ("${TMPVAR}" STREQUAL "") # named<version>.ext
               file(GLOB TMPVAR "${DIR}/${FNAME}d[0-9]*.${FEXT}")
            endif()
         endif()
         # Attempt to find file name without debug marker
         if ("${TMPVAR}" STREQUAL "") # name.ext
            file(GLOB TMPVAR "${DIR}/${FNAME}.${FEXT}")
         endif()
         if ("${TMPVAR}" STREQUAL "") # name-<version>.ext
            file(GLOB TMPVAR "${DIR}/${FNAME}-[0-9]*.${FEXT}")
         endif()
         if ("${TMPVAR}" STREQUAL "") # name<version>.ext
            file(GLOB TMPVAR "${DIR}/${FNAME}[0-9]*.${FEXT}")
         endif()
      endforeach()
      # Assign the result of the search to VAR
      if ("${TMPVAR}" STREQUAL "")
         # Couldn't find it, set to NOTFOUND
         set(${VAR} "${VAR}-NOTFOUND" PARENT_SCOPE)
      else()
         # GLOB could return a list of matching filenames, in which case
         # we choose the latest version (i.e. the one which is alphabetically
         # last)
         list(SORT TMPVAR)
         list(REVERSE TMPVAR)
         list(GET TMPVAR 0 TMPVAR)
         # Fixed: PARENT_SCOPE was missing here, so the result never
         # reached the caller on the success path.
         set(${VAR} "${TMPVAR}" PARENT_SCOPE)
      endif()
   endfunction()

   # windows_find_library(VAR LIBNAME... [REQUIRED])
   #
   # Search VCPKG_LIB_DIRS for each listed .lib, using the same pattern
   # variants as windows_find_file, and append every hit to VAR in the
   # caller's scope. If a name cannot be found, VAR is set to
   # "<VAR>-NOTFOUND" and a FATAL_ERROR (with REQUIRED) or SEND_ERROR is
   # raised. If VAR is already non-empty on entry, it is left untouched.
   function(windows_find_library VAR)
      # Avoid some horrible indirections by using a local variable
      set(OUTVAR "${${VAR}}")
      # If the variable is already specified, don't attempt to
      # find it again
      if (NOT "${OUTVAR}" STREQUAL "")
         return()
      endif()
      # Attempt to find the REQUIRED flag
      foreach(FLAG ${ARGN})
         if (FLAG STREQUAL "REQUIRED")
            set(IS_REQUIRED TRUE)
         endif()
      endforeach()
      # Loop over all the library names
      foreach(LIBNAME ${ARGN})
         set(TMPVAR "")
         # Could be the required flag
         if (LIBNAME STREQUAL "REQUIRED")
            continue()
         endif()
         # Loop over all library directories, searching for the
         # library name
         foreach (DIR ${VCPKG_LIB_DIRS})
            # If a debug build is specified, first try and find the library name
            # with a debug marker
            if(CMAKE_BUILD_TYPE MATCHES "^Debug$" OR NOT DEFINED CMAKE_BUILD_TYPE)
               if ("${TMPVAR}" STREQUAL "") # name-d.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}-d.lib")
               endif()
               if ("${TMPVAR}" STREQUAL "") # named.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}d.lib")
               endif()
               # (a literal duplicate of the following pattern was removed here)
               if ("${TMPVAR}" STREQUAL "") # name-<version>-d.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}-[0-9]*-d.lib")
               endif()
               if ("${TMPVAR}" STREQUAL "") # name<version>-d.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}[0-9]*-d.lib")
               endif()
               if ("${TMPVAR}" STREQUAL "") # name<version>d.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}[0-9]*d.lib")
               endif()
               if ("${TMPVAR}" STREQUAL "") # name-d-<version>.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}-d-[0-9]*.lib")
               endif()
               if ("${TMPVAR}" STREQUAL "") # named-<version>.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}d-[0-9]*.lib")
               endif()
               if ("${TMPVAR}" STREQUAL "") # name-d<version>.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}-d[0-9]*.lib")
               endif()
               if ("${TMPVAR}" STREQUAL "") # named<version>.lib
                  file(GLOB TMPVAR "${DIR}/${LIBNAME}d[0-9]*.lib")
               endif()
            endif()
            # Attempt to find library name without debug marker
            if ("${TMPVAR}" STREQUAL "") # name.lib
               file(GLOB TMPVAR "${DIR}/${LIBNAME}.lib")
            endif()
            if ("${TMPVAR}" STREQUAL "") # name-<version>.lib
               file(GLOB TMPVAR "${DIR}/${LIBNAME}-[0-9]*.lib")
            endif()
            if ("${TMPVAR}" STREQUAL "") # name<version>.lib
               file(GLOB TMPVAR "${DIR}/${LIBNAME}[0-9]*.lib")
            endif()
         endforeach()
         # Assign the result of the search to VAR
         if ("${TMPVAR}" STREQUAL "")
            # Couldn't find it, set to NOTFOUND
            set(${VAR} "${VAR}-NOTFOUND" PARENT_SCOPE)
            if (IS_REQUIRED)
               message(FATAL_ERROR "Could NOT find REQUIRED library ${LIBNAME} required for ${VAR}")
            else()
               message(SEND_ERROR "Could NOT find library ${LIBNAME} required for ${VAR}")
            endif()
         else()
            # GLOB could return a list of matching filenames, in which case
            # we choose the latest version (i.e. the one which is alphabetically
            # last)
            list(SORT TMPVAR)
            list(REVERSE TMPVAR)
            list(GET TMPVAR 0 TMPVAR)
            list(APPEND OUTVAR "${TMPVAR}")
         endif()
      endforeach()
      set(${VAR} "${OUTVAR}" PARENT_SCOPE)
   endfunction()
endif()


================================================
FILE: codemeta.json
================================================
{
    "@context": "https://raw.githubusercontent.com/codemeta/codemeta/master/codemeta.jsonld",
    "@type": "Code",
    "author": [
        "Kasper Peeters"
    ],
    "identifier": "",
    "codeRepository": "https://github.com/kpeeters/cadabra2",
    "datePublished": "2018-12-05",
    "dateModified": "2018-12-05",
    "dateCreated": "2018-12-05",
    "description": "Computer algebra for field theory revisited",
    "keywords": "physics, field theory, tensors, computer algebra",
    "license": "GPL v3.0",
    "title": "Cadabra2",
    "version": "https://doi.org/10.5281/zenodo.1842262"
}


================================================
FILE: conda/build.sh
================================================
#!/bin/bash

REL_SP_DIR=$(python -c "import os;print(os.path.relpath(os.getenv('SP_DIR'), os.getenv('PREFIX')))")
sed -i.bak "s@set(PYTHON_SITE_PATH.*@set(PYTHON_SITE_PATH $REL_SP_DIR)@g" CMakeLists.txt

mkdir build
cd build
cmake \
    -DCMAKE_PREFIX_PATH=$PREFIX \
-DCMAKE_INSTALL_PREFIX=$PREFIX \ -DCMAKE_BUILD_TYPE=Release \ -DENABLE_SYSTEM_JSONCPP=ON \ -DENABLE_MATHEMATICA=OFF \ -DENABLE_JUPYTER=ON \ -DPYTHON_EXECUTABLE=$PYTHON \ .. make -j${CPU_COUNT} make install mkdir -p $PREFIX/share/cadabra2 ln -s $SP_DIR $PREFIX/share/cadabra2/python # Following test fails with no module named `module03` found. TESTS_TO_SKIP="modules" if [[ "$target_platform" == osx* ]]; then # The following test segfaults on OSX TESTS_TO_SKIP="${TESTS_TO_SKIP}|implicit" fi ctest --output-on-failure -E "${TESTS_TO_SKIP}" -j${CPU_COUNT} ================================================ FILE: conda/meta.yaml ================================================ {% set name = "cadabra2" %} {% set version = "2.2.9" %} package: name: {{ name|lower }} version: {{ version }} source: path: ../ # url: https://github.com/kpeeters/cadabra2/archive/{{ version }}.tar.gz # sha256: 72786423b2ff847e8e0035326a8f1b2cdcf76a68c77f95588276bbccfaa74d7e # patches: # - cdd2fb45dc3e280d6f3834b54d9f7d612b604155.patch build: number: 0 skip: True # [win] requirements: build: - {{ compiler('c') }} - {{ compiler('cxx') }} - cmake - make - pkg-config - {{ cdt('libice-devel') }} # [linux] - {{ cdt('libsm-devel') }} # [linux] - {{ cdt('libx11-devel') }} # [linux] - {{ cdt('libxcomposite-devel') }} # [linux] - {{ cdt('libxcursor-devel') }} # [linux] - {{ cdt('libxdamage-devel') }} # [linux] - {{ cdt('libxext-devel') }} # [linux] - {{ cdt('libxi-devel') }} # [linux] - {{ cdt('libxinerama-devel') }} # [linux] - {{ cdt('libxfixes-devel') }} # [linux] - {{ cdt('libxrandr-devel') }} # [linux] - {{ cdt('libxrender-devel') }} # [linux] - {{ cdt('libxtst-devel') }} # [linux] - {{ cdt('mesa-libEGL-devel') }} # [linux] - {{ cdt('mesa-libGL-devel') }} # [linux] - {{ cdt('xorg-x11-proto-devel') }} # [linux] host: - python - gmp - boost-cpp - libuuid - sqlite - gtkmm-3.0 - glibmm - cairomm-1.0 - glib - sigcpp-2.0 - pangomm-1.4 - cairo - atkmm-1.6 - nlohmann_json - xtl - jsoncpp - xeus - zeromq - cppzmq 
- gtkmm - pybind11 # Needed for ctest - sympy run: - boost-cpp - python - sympy - matplotlib-base - xeus # Remove these when they get run_exports - {{ pin_compatible("cairo") }} - {{ pin_compatible("glibmm") }} - {{ pin_compatible("glib") }} test: imports: - cadabra2 about: home: https://github.com/kpeeters/cadabra2 license: GPL-3.0-only license_family: GPL license_file: LICENSE summary: 'a field-theory motivated approach to computer algebra' extra: recipe-maintainers: - isuruf - kpeeters ================================================ FILE: config/AppRun ================================================ #!/bin/bash # # Set the PYTHONHOME and PYTHONPATH variables to ensure that Cadabra # inside the AppImage can find the Python installation. # export PYTHONHOME=$APPDIR/usr export PYTHONPATH=$APPDIR/usr/lib/python3.8:$APPDIR/usr/lib/python3.8/site-packages:$APPDIR/usr/lib/python3.8/dist-packages:$APPDIR/usr/lib/python3/site-packages:$APPDIR/usr/lib/python3/dist-packages export LD_LIBRARY_PATH=$APPDIR/usr/lib:$LD_LIBRARY_PATH export LANG=en_US.UTF-8 exec $APPDIR/usr/bin/cadabra2-gtk "$@" ================================================ FILE: config/Doxyfile ================================================ # Doxyfile 1.8.11 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). 
#--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the config file # that follow. The default is UTF-8 which is also the encoding used for all text # before the first occurrence of this tag. Doxygen uses libiconv (or the iconv # built into libc) for the transcoding. See http://www.gnu.org/software/libiconv # for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = "Cadabra" # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. PROJECT_NUMBER = # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = "Computer algebra system for field theory problems" # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy # the logo to the output directory. PROJECT_LOGO = images/64x64/cadabra2-gtk.png # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. 
If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = doxygen # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. # The default value is: NO. CREATE_SUBDIRS = NO # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. # Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, # Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), # Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, # Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), # Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, # Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, # Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, # Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). 
Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. ABBREVIATE_BRIEF = # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = YES # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. 
# Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = YES # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. 
QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. TAB_SIZE = 4 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:\n" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines. ALIASES = # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. TCL_SUBST = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. 
Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. OPTIMIZE_OUTPUT_VHDL = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, Javascript, # C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: # FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: # Fortran. In the later case the parser tries to guess whether the code is fixed # or free formatted code, this is the default for Fortran type files), VHDL. For # instance to make doxygen treat .inc files as Fortran files (default is PHP), # and .f files as C (default is Fortran), use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. 
# # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See http://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. 
SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. # The default value is: NO. GROUP_NESTED_COMPOUNDS = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. 
INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. 
LOOKUP_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = YES # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = YES # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO, only methods in the interface are # included. # The default value is: NO. 
EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. This option # has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # (class|struct|union) declarations. If set to NO, these declarations will be # included in the documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = NO # If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file # names in lower-case letters. 
If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # and Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = NO # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. If set to # YES the compound reference will be hidden. # The default value is: NO. HIDE_COMPOUND_REFERENCE= NO # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. # The default value is: YES. 
SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. 
By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. 
If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = config/DoxygenLayout.xml # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. 
The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. QUIET = YES # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = YES # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some parameters # in a documented function, or documenting parameters that don't exist or using # markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong or incomplete # parameter documentation, but not about the absence of documentation. 
# The default value is: NO. WARN_NO_PARAMDOC = NO # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when # a warning is encountered. # The default value is: NO. WARN_AS_ERROR = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text" # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = doc/main.md \ c++lib \ core \ core/pythoncdb \ core/cadabra2 \ doc/modules.dox \ core/cadabra2_defaults.py \ core/algorithms \ core/properties \ client_server \ frontend/common \ frontend/gtkmm \ frontend/osx/Cadabra \ frontend/osx/Cadabra/Cadabra # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. 
See the libiconv # documentation (see: http://www.gnu.org/software/libiconv) for the list of # possible encodings. # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # read by doxygen. # # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, # *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f, *.for, *.tcl, # *.vhd, *.vhdl, *.ucf, *.qsf, *.as and *.js. FILE_PATTERNS = # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = NO # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = core/tree.hh \ client_server/tree.hh \ frontend/common/lodepng.cc \ frontend/common/lodepng.h # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. 
# # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = . # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # <filter> <input-file> # # where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. 
# # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). 
This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = NO # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # function all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = NO # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. 
REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see http://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the config file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = YES # If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the # clang parser (see: http://clang.llvm.org/) for more accurate parsing at the # cost of reduced performance. 
This can be particularly helpful with template # rich C++ code for which doxygen's built-in parser lacks the necessary type # information. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse-libclang=ON option for CMake. # The default value is: NO. CLANG_ASSISTED_PARSING = NO # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_OPTIONS = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = YES # The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in # which the alphabetical index list will be split. # Minimum value: 1, maximum value: 20, default value: 5. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. 
IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. 
If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. # Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra style sheet files is of importance (e.g. the last # style sheet in the list overrules the setting of the previous ones in the # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = config/DoxygenStyle.css # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. 
Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a colorwheel, see # http://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use grayscales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. 
Setting this # to YES can help to show when doxygen was last run and thus if the # documentation is up to date. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_SECTIONS = NO # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 0 is a special value # representing an infinite number of entries and will result in a full expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: http://developer.apple.com/tools/xcode/), introduced with # OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html # for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. 
GENERATE_DOCSET = NO # This tag determines the name of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen generated docs" # This tag specifies a string that should uniquely identify the documentation # set bundle. This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_BUNDLE_ID = org.doxygen.Project # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. 
Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated # (YES) or that it should be included in the master .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated # (YES) or a normal table of contents (NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. 
TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace # (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). 
# This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location of Qt's # qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the # generated .qhp file. # This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. 
Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine-tune the look of the index. As an example, the default style # sheet generated by doxygen has an example that shows how to put an image at # the root of the tree instead of the PROJECT_NAME. Since the tree basically has # the same information as the tab index, you could consider setting # DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_TREEVIEW = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. 
TREEVIEW_WIDTH = 250 # If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = NO # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. # Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # # Note that when changing this option you need to delete any form_*.png files in # the HTML output directory before the changes have effect. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # http://www.mathjax.org) which uses client side Javascript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want the formulas to look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. USE_MATHJAX = YES # When MathJax is enabled you can set the default output format to be used for # the MathJax output. See the MathJax site (see: # http://docs.mathjax.org/en/latest/output.html) for more details.
# Possible values are: HTML-CSS (which is slower, but has the best # compatibility), NativeMML (i.e. MathML) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from http://www.mathjax.org before deployment. # The default value is: http://cdn.mathjax.org/mathjax/latest. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. 
Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use <access key> + S # (what the <access key> is depends on the OS and browser, but it is typically # <CTRL>, <ALT>/