Repository: potentialventures/cocotb Branch: master Commit: 41cacf6d7954 Files: 635 Total size: 3.0 MB Directory structure: gitextract_potnizu6/ ├── .backportrc.json ├── .clang-format ├── .codecov.yml ├── .devcontainer/ │ ├── Dockerfile │ ├── README.md │ ├── devcontainer.json │ └── post-create.sh ├── .git-blame-ignore-revs ├── .github/ │ ├── PULL_REQUEST_TEMPLATE.md │ ├── dependabot.yml │ ├── generate-envs.py │ ├── issue_template.md │ └── workflows/ │ ├── backport.yml │ ├── benchmark.yml │ ├── build-test-dev.yml │ ├── build-test-release.yml │ ├── ecosystem-compat.yml │ ├── experimental.yml │ ├── extended.yml │ ├── regression-tests.yml │ └── stale.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── cocotb_build_libs.py ├── docs/ │ ├── .gitignore │ ├── Doxyfile │ ├── README.md │ └── source/ │ ├── _static/ │ │ ├── cocotb.css │ │ └── cocotb.js │ ├── analog_model.rst │ ├── building.rst │ ├── conf.py │ ├── contributing.rst │ ├── contributors.rst │ ├── coroutines.rst │ ├── coverage.rst │ ├── custom_flows.rst │ ├── developing.rst │ ├── diagrams/ │ │ └── README.md │ ├── examples.rst │ ├── extensions.rst │ ├── further_resources.rst │ ├── genindex.rst │ ├── glossary.rst │ ├── index.rst │ ├── install.rst │ ├── install_devel.rst │ ├── library_reference.rst │ ├── library_reference_c.rst │ ├── maintaining.rst │ ├── newsfragments/ │ │ ├── 4519.feature.rst │ │ ├── 4717.change.rst │ │ ├── 4986.removal.1.rst │ │ ├── 4986.removal.rst │ │ ├── 4987.removal.rst │ │ ├── 5007.feature.1.rst │ │ ├── 5007.feature.2.rst │ │ ├── 5007.feature.rst │ │ ├── 5041.bugfix.rst │ │ ├── 5057.feature.rst │ │ ├── 5076.feature.rst │ │ ├── 5090.feature.rst │ │ ├── 5106.feature.rst │ │ ├── 5114.feature.rst │ │ ├── 5131.feature.rst │ │ ├── 5162.change.rst │ │ ├── 5163.feature.rst │ │ ├── 5165.feature.1.rst │ │ ├── 5165.feature.2.rst │ │ ├── 5165.feature.rst │ │ ├── 5179.change.rst │ │ ├── 5179.feature.1.rst │ │ ├── 
5179.feature.rst │ │ ├── 5181.feature.rst │ │ ├── 5182.feature.rst │ │ ├── 5205.feature.rst │ │ ├── 5206.removal.rst │ │ ├── 5207.change.rst │ │ ├── 5220.feature.1.rst │ │ ├── 5220.feature.rst │ │ ├── 5222.feature.rst │ │ ├── 5232.change.rst │ │ ├── 5248.bugfix.rst │ │ ├── 5258.change.rst │ │ ├── 5263.change.rst │ │ ├── 5293.feature.1.rst │ │ ├── 5293.feature.rst │ │ ├── 5306.bugfix.rst │ │ ├── 5306.change.rst │ │ ├── 5309.feature.rst │ │ ├── 5357.feature.rst │ │ ├── 5363.change.rst │ │ ├── 5366.feature.rst │ │ ├── 5380.feature.rst │ │ ├── 5382.change.rst │ │ ├── 5392.feature.rst │ │ ├── 5395.removal.rst │ │ ├── 5415.feature.rst │ │ ├── 5439.bugfix.rst │ │ ├── 5440.bugfix.rst │ │ ├── 5450.feature.rst │ │ ├── 5483.feature.rst │ │ ├── 5506.feature.rst │ │ ├── 5516.change.rst │ │ ├── 5516.removal.1.rst │ │ ├── 5516.removal.rst │ │ └── README.rst │ ├── platform_support.rst │ ├── profiling.rst │ ├── py-modindex.rst │ ├── pytest.rst │ ├── quickstart.rst │ ├── refcard.rst │ ├── regulator.rst │ ├── release_notes.rst │ ├── rescap.rst │ ├── roadmap.rst │ ├── rotating_logger.rst │ ├── runner.rst │ ├── simulator_support.rst │ ├── spelling_wordlist.txt │ ├── support.rst │ ├── timing_model.rst │ ├── troubleshooting.rst │ ├── update_indexing.rst │ ├── upgrade-2.0.rst │ └── writing_testbenches.rst ├── examples/ │ ├── Makefile │ ├── adder/ │ │ ├── hdl/ │ │ │ ├── adder.sv │ │ │ └── adder.vhdl │ │ ├── model/ │ │ │ ├── __init__.py │ │ │ └── adder_model.py │ │ └── tests/ │ │ ├── Makefile │ │ └── test_adder.py │ ├── analog_model/ │ │ ├── Makefile │ │ ├── afe.py │ │ ├── digital.sv │ │ └── test_analog_model.py │ ├── doc_examples/ │ │ └── quickstart/ │ │ ├── Makefile │ │ ├── simple_counter.sv │ │ ├── simple_counter_testcases.py │ │ └── test_runner.py │ ├── matrix_multiplier/ │ │ ├── hdl/ │ │ │ ├── matrix_multiplier.sv │ │ │ ├── matrix_multiplier.vhd │ │ │ └── matrix_multiplier_pkg.vhd │ │ └── tests/ │ │ ├── Makefile │ │ ├── matrix_multiplier_tests.py │ │ └── test_matrix_multiplier.py │ ├── 
mixed_language/ │ │ ├── hdl/ │ │ │ ├── endian_swapper.sv │ │ │ ├── endian_swapper.vhdl │ │ │ ├── toplevel.sv │ │ │ └── toplevel.vhdl │ │ └── tests/ │ │ ├── Makefile │ │ └── test_mixed_language.py │ ├── mixed_signal/ │ │ ├── .gitignore │ │ ├── hdl/ │ │ │ ├── analog_probe_cadence.sv │ │ │ ├── analog_probe_synopsys.sv │ │ │ ├── capacitor.vams │ │ │ ├── nettypes_pkg_cadence.sv │ │ │ ├── nettypes_pkg_synopsys.sv │ │ │ ├── regulator.sv │ │ │ ├── regulator.vams │ │ │ ├── regulator_block.vams │ │ │ ├── rescap.sv │ │ │ ├── resistor.vams │ │ │ ├── tb_regulator.sv │ │ │ └── tb_rescap.sv │ │ └── tests/ │ │ ├── Makefile │ │ ├── run.scs │ │ ├── test_regulator_plot.py │ │ ├── test_regulator_trim.py │ │ ├── test_rescap.py │ │ ├── test_rescap_minimalist.py │ │ └── vcsAD.init │ └── simple_dff/ │ ├── .gitignore │ ├── Makefile │ ├── dff.sv │ ├── dff.vhdl │ └── test_dff.py ├── noxfile.py ├── pyproject.toml ├── setup.py ├── src/ │ ├── cocotb/ │ │ ├── _ANSI.py │ │ ├── __init__.py │ │ ├── _base_triggers.py │ │ ├── _bridge.py │ │ ├── _concurrent_waiters.py │ │ ├── _decorators.py │ │ ├── _deprecation.py │ │ ├── _event_loop.py │ │ ├── _extended_awaitables.py │ │ ├── _gpi_triggers.py │ │ ├── _init.py │ │ ├── _outcomes.py │ │ ├── _profiling.py │ │ ├── _py_compat.py │ │ ├── _shutdown.py │ │ ├── _task_manager.py │ │ ├── _test_factory.py │ │ ├── _test_manager.py │ │ ├── _utils.py │ │ ├── _vendor/ │ │ │ ├── README.md │ │ │ ├── fli/ │ │ │ │ ├── acc_user.h │ │ │ │ ├── acc_vhdl.h │ │ │ │ └── mti.h │ │ │ ├── tcl/ │ │ │ │ ├── license.terms │ │ │ │ ├── tcl.h │ │ │ │ ├── tclDecls.h │ │ │ │ └── tclPlatDecls.h │ │ │ ├── vhpi/ │ │ │ │ └── vhpi_user.h │ │ │ └── vpi/ │ │ │ ├── sv_vpi_user.h │ │ │ ├── vpi_compatibility.h │ │ │ └── vpi_user.h │ │ ├── _version.py │ │ ├── _xunit_reporter.py │ │ ├── clock.py │ │ ├── debug.py │ │ ├── handle.py │ │ ├── logging.py │ │ ├── py.typed │ │ ├── queue.py │ │ ├── regression.py │ │ ├── result.py │ │ ├── share/ │ │ │ ├── def/ │ │ │ │ ├── .gitignore │ │ │ │ ├── README.md │ │ │ 
│ ├── aldec.def │ │ │ │ ├── ghdl.def │ │ │ │ ├── icarus.def │ │ │ │ ├── modelsim.def │ │ │ │ └── nvcvhpi.def │ │ │ ├── include/ │ │ │ │ ├── exports.h │ │ │ │ ├── gpi.h │ │ │ │ ├── vhpi_user_ext.h │ │ │ │ └── vpi_user_ext.h │ │ │ └── lib/ │ │ │ ├── gpi/ │ │ │ │ ├── GpiCbHdl.cpp │ │ │ │ ├── GpiCommon.cpp │ │ │ │ ├── dynload.cpp │ │ │ │ ├── fli/ │ │ │ │ │ ├── FliCbHdl.cpp │ │ │ │ │ ├── FliImpl.cpp │ │ │ │ │ ├── FliImpl.hpp │ │ │ │ │ └── FliObjHdl.cpp │ │ │ │ ├── gpi_priv.hpp │ │ │ │ ├── logging.cpp │ │ │ │ ├── logging.hpp │ │ │ │ ├── vhpi/ │ │ │ │ │ ├── VhpiCbHdl.cpp │ │ │ │ │ ├── VhpiImpl.cpp │ │ │ │ │ ├── VhpiImpl.hpp │ │ │ │ │ ├── VhpiIterator.cpp │ │ │ │ │ ├── VhpiObj.cpp │ │ │ │ │ └── VhpiSignal.cpp │ │ │ │ └── vpi/ │ │ │ │ ├── VpiCbHdl.cpp │ │ │ │ ├── VpiImpl.cpp │ │ │ │ ├── VpiImpl.hpp │ │ │ │ ├── VpiIterator.cpp │ │ │ │ ├── VpiObj.cpp │ │ │ │ └── VpiSignal.cpp │ │ │ ├── pygpi/ │ │ │ │ ├── bind.cpp │ │ │ │ ├── embed.cpp │ │ │ │ ├── logging.cpp │ │ │ │ └── pygpi_priv.hpp │ │ │ ├── utils.hpp │ │ │ └── verilator/ │ │ │ └── verilator.cpp │ │ ├── simtime.py │ │ ├── simulator.pyi │ │ ├── task.py │ │ ├── triggers.py │ │ ├── types/ │ │ │ ├── __init__.py │ │ │ ├── _abstract_array.py │ │ │ ├── _array.py │ │ │ ├── _indexing.py │ │ │ ├── _logic.py │ │ │ ├── _logic_array.py │ │ │ ├── _range.py │ │ │ └── _resolve.py │ │ └── utils.py │ ├── cocotb_tools/ │ │ ├── __init__.py │ │ ├── _coverage.py │ │ ├── _env.py │ │ ├── _vendor/ │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ └── distutils_version.py │ │ ├── check_results.py │ │ ├── combine_results.py │ │ ├── config.py │ │ ├── ipython_support.py │ │ ├── makefiles/ │ │ │ ├── Makefile.deprecations │ │ │ ├── Makefile.inc │ │ │ ├── Makefile.sim │ │ │ └── simulators/ │ │ │ ├── Makefile.activehdl │ │ │ ├── Makefile.cvc │ │ │ ├── Makefile.dsim │ │ │ ├── Makefile.ghdl │ │ │ ├── Makefile.icarus │ │ │ ├── Makefile.ius │ │ │ ├── Makefile.modelsim │ │ │ ├── Makefile.nvc │ │ │ ├── Makefile.questa │ │ │ ├── Makefile.questa-compat │ │ │ ├── 
Makefile.questa-qisqrun │ │ │ ├── Makefile.riviera │ │ │ ├── Makefile.vcs │ │ │ ├── Makefile.verilator │ │ │ └── Makefile.xcelium │ │ ├── py.typed │ │ ├── pytest/ │ │ │ ├── __init__.py │ │ │ ├── _compat.py │ │ │ ├── _controller.py │ │ │ ├── _fixture.py │ │ │ ├── _handle.py │ │ │ ├── _init.py │ │ │ ├── _junitxml.py │ │ │ ├── _logging.py │ │ │ ├── _option.py │ │ │ ├── _regression.py │ │ │ ├── _runner.py │ │ │ ├── _test.py │ │ │ ├── _testbench.py │ │ │ ├── hdl.py │ │ │ ├── hookspecs.py │ │ │ ├── mark.py │ │ │ ├── plugin.py │ │ │ └── py.typed │ │ ├── runner.py │ │ └── sim_versions.py │ ├── pygpi/ │ │ ├── __init__.py │ │ ├── entry.py │ │ └── py.typed │ └── pyproject.toml └── tests/ ├── Makefile ├── benchmarks/ │ ├── test_matrix_multiplier.py │ └── test_parameterize_perf/ │ ├── parametrize_perf_top.sv │ ├── parametrize_performance_tests.py │ └── test_parameterize_perf.py ├── designs/ │ ├── array_module/ │ │ ├── Makefile │ │ ├── array_module.sv │ │ ├── array_module.vhd │ │ └── array_module_pack.vhd │ ├── basic_hierarchy_module/ │ │ ├── Makefile │ │ └── basic_hierarchy_module.v │ ├── multi_dimension_array/ │ │ ├── Makefile │ │ ├── cocotb_array.sv │ │ └── cocotb_array_pkg.sv │ ├── plusargs_module/ │ │ ├── Makefile │ │ ├── tb_top.v │ │ └── tb_top.vhd │ ├── runner/ │ │ ├── runner.sv │ │ └── runner.vhdl │ ├── runner_defines/ │ │ └── runner_defines.sv │ ├── sample_module/ │ │ ├── Makefile │ │ ├── sample_module.sv │ │ ├── sample_module.vhdl │ │ ├── sample_module_1.sv │ │ ├── sample_module_1.vhdl │ │ └── sample_module_package.vhdl │ ├── uart2bus/ │ │ ├── Makefile │ │ ├── README │ │ ├── top/ │ │ │ ├── verilog_toplevel.sv │ │ │ └── vhdl_toplevel.vhdl │ │ ├── verilog/ │ │ │ ├── baud_gen.v │ │ │ ├── uart2bus_top.v │ │ │ ├── uart_parser.v │ │ │ ├── uart_rx.v │ │ │ ├── uart_top.v │ │ │ └── uart_tx.v │ │ └── vhdl/ │ │ ├── baudGen.vhd │ │ ├── uart2BusTop.vhd │ │ ├── uart2BusTop_pkg.vhd │ │ ├── uartParser.vhd │ │ ├── uartRx.vhd │ │ ├── uartTop.vhd │ │ └── uartTx.vhd │ ├── 
verilator_timing/ │ │ ├── Makefile │ │ ├── README.md │ │ └── test_verilator_timing.sv │ ├── vhdl_configurations/ │ │ ├── Makefile │ │ ├── configurations.vhd │ │ ├── dut.vhd │ │ ├── testbench.sv │ │ └── testbench.vhd │ └── viterbi_decoder_axi4s/ │ ├── Makefile │ ├── gpl-2.0.txt │ ├── packages/ │ │ ├── pkg_components.vhd │ │ ├── pkg_helper.vhd │ │ ├── pkg_param.vhd │ │ ├── pkg_param_derived.vhd │ │ ├── pkg_trellis.vhd │ │ └── pkg_types.vhd │ └── src/ │ ├── acs.vhd │ ├── axi4s_buffer.vhd │ ├── branch_distance.vhd │ ├── dec_viterbi.vhd │ ├── generic_sp_ram.vhd │ ├── ram_ctrl.vhd │ ├── recursion.vhd │ ├── reorder.vhd │ └── traceback.vhd ├── pytest/ │ ├── test_array.py │ ├── test_cocotb.py │ ├── test_env.py │ ├── test_logging_with_envs.py │ ├── test_logic.py │ ├── test_logic_array.py │ ├── test_logs.py │ ├── test_parallel_cocotb.py │ ├── test_parameterize.py │ ├── test_plusargs.py │ ├── test_range.py │ ├── test_runner.py │ ├── test_timescale.py │ ├── test_version.py │ ├── test_vhdl_libraries_multiple.py │ └── test_waves.py ├── pytest_plugin/ │ ├── conftest.py │ ├── test_caplog.py │ ├── test_cocotb.py │ ├── test_cocotb_top.py │ ├── test_end_test.py │ ├── test_fixture.py │ ├── test_parametrize.py │ ├── test_sample_module.py │ ├── test_sample_module_1.py │ ├── test_sample_module_2.py │ ├── test_session.py │ ├── test_timeout.py │ └── test_xfail.py ├── sxs.ps1 └── test_cases/ ├── test_array/ │ ├── Makefile │ └── test_array.py ├── test_array_simple/ │ ├── Makefile │ └── test_array_simple.py ├── test_async_bridge/ │ ├── Makefile │ └── test_async_bridge.py ├── test_cocotb/ │ ├── Makefile │ ├── common.py │ ├── pytest_assertion_rewriting.py │ ├── test_async_coroutines.py │ ├── test_async_generators.py │ ├── test_ci.py │ ├── test_clock.py │ ├── test_deprecated.py │ ├── test_edge_triggers.py │ ├── test_first_combine.py │ ├── test_handle.py │ ├── test_logging.py │ ├── test_queues.py │ ├── test_scheduler.py │ ├── test_sim_time_utils.py │ ├── test_start_soon.py │ ├── 
test_synchronization_primitives.py │ ├── test_task_manager.py │ ├── test_testfactory.py │ ├── test_tests.py │ ├── test_timing_triggers.py │ └── test_waiters.py ├── test_compare/ │ ├── Makefile │ └── test_compare.py ├── test_configuration/ │ ├── Makefile │ └── test_configurations.py ├── test_custom_entry/ │ ├── .gitignore │ ├── Makefile │ ├── custom_entry.py │ └── expected_results.log ├── test_deadlock/ │ ├── Makefile │ └── test_deadlock.py ├── test_defaultless_parameter/ │ ├── Makefile │ ├── test_defaultless_parameter.py │ └── test_defaultless_parameter.sv ├── test_discovery/ │ ├── Makefile │ └── test_discovery.py ├── test_dumpfile_verilator/ │ ├── Makefile │ ├── test_dumpfile_verilator.py │ └── test_dumpfile_verilator.sv ├── test_exit_error/ │ ├── Makefile │ └── test_exit.py ├── test_failure/ │ ├── Makefile │ └── test_failure.py ├── test_fatal/ │ ├── Makefile │ ├── fatal.sv │ ├── fatal.vhd │ └── test_fatal.py ├── test_first_on_coincident_triggers/ │ ├── Makefile │ ├── test.sv │ ├── test.vhd │ └── test_first_on_coincident_triggers.py ├── test_force_release/ │ ├── Makefile │ └── test_force_release.py ├── test_forked_exception/ │ ├── Makefile │ └── test_forked_exception.py ├── test_gpi_extra_bad_lib/ │ ├── Makefile │ └── test_simple.py ├── test_gpi_users_bad_lib/ │ ├── Makefile │ └── test_simple.py ├── test_gpi_users_notset/ │ ├── Makefile │ └── test_simple.py ├── test_indexing_warning/ │ ├── Makefile │ ├── indexing_warning_tests.py │ ├── test_indexing_warning.py │ └── top.sv ├── test_inertial_writes/ │ ├── Makefile │ └── inertial_writes_tests.py ├── test_integers/ │ ├── Makefile │ ├── integer_tests.py │ ├── integers.sv │ ├── integers.vhdl │ ├── integers_pkg.vhdl │ └── test_integers.py ├── test_iteration_mixedlang/ │ ├── Makefile │ └── test_iteration.py ├── test_iteration_verilog/ │ ├── Makefile │ ├── endian_swapper.sv │ └── test_iteration_es.py ├── test_iteration_vhdl/ │ ├── Makefile │ └── test_iteration.py ├── test_kill_sim/ │ ├── Makefile │ └── kill_sim_tests.py 
├── test_listing/ │ ├── .gitignore │ ├── Makefile │ ├── check_results.py │ ├── test_listing_1.py │ └── test_listing_2.py ├── test_log_prefix/ │ ├── Makefile │ ├── log_prefix_tests.py │ ├── test_log_prefix.py │ └── top.sv ├── test_logic_array_indexing/ │ ├── Makefile │ ├── test_logic_array_indexing.py │ ├── test_logic_array_indexing.v │ └── test_logic_array_indexing.vhdl ├── test_long_log_msg/ │ ├── .gitignore │ ├── Makefile │ ├── test.sv │ ├── test.vhd │ └── test_long_log_msg.py ├── test_max_failures/ │ ├── Makefile │ └── test_max_failures.py ├── test_module_var_empty/ │ └── Makefile ├── test_module_var_messy/ │ ├── Makefile │ └── test_nothing.py ├── test_module_without_tests/ │ ├── Makefile │ └── test_nothing.py ├── test_multi_dimension_array/ │ ├── Makefile │ └── test_cocotb_array.py ├── test_multi_level_module_path/ │ ├── Makefile │ ├── __init__.py │ └── test_package/ │ ├── __init__.py │ └── test_module_path.py ├── test_null_ranges/ │ ├── Makefile │ ├── null_ranges.vhdl │ ├── null_ranges_pkg.vhdl │ └── test_null_ranges.py ├── test_one_empty_test/ │ ├── Makefile │ └── test_one_empty_test.py ├── test_package/ │ ├── Makefile │ ├── cocotb_package.sv │ ├── cocotb_package_pkg.sv │ └── test_package.py ├── test_packed_union/ │ ├── Makefile │ ├── test_packed_union.py │ └── test_packed_union.sv ├── test_plusargs/ │ ├── Makefile │ └── plusargs.py ├── test_random_test_order/ │ ├── Makefile │ └── test_random_test_order.py ├── test_seed/ │ ├── .gitignore │ ├── Makefile │ ├── test_other.py │ └── test_seed.py ├── test_select_testcase/ │ ├── Makefile │ ├── x_tests.py │ ├── y_tests.py │ └── y_tests_again.py ├── test_select_testcase_error/ │ ├── Makefile │ └── x_tests.py ├── test_similar_scope_name/ │ ├── Makefile │ ├── test.sv │ ├── test.vhd │ └── test_similar_scope_name.py ├── test_skip/ │ ├── Makefile │ └── test_skip.py ├── test_skipped_explicitly_run/ │ ├── Makefile │ └── test_skipped_explicitly_run.py ├── test_struct/ │ ├── Makefile │ └── test_struct.py ├── test_sv_interface/ 
│ ├── Makefile │ ├── test_sv_if.py │ └── top.sv ├── test_test_filter/ │ ├── Makefile │ └── x_tests.py ├── test_toplevel_architecture_same_as_entity/ │ ├── Makefile │ ├── test.vhdl │ └── test_toplevel_architecture_same_as_entity.py ├── test_toplevel_library/ │ ├── Makefile │ ├── mylib.vhd │ └── test_myentity.py ├── test_verilator_timing_a/ │ ├── Makefile │ └── test_verilator_timing_a.py ├── test_verilator_timing_b/ │ ├── Makefile │ └── test_verilator_timing_b.py ├── test_verilator_timing_c/ │ ├── Makefile │ ├── check_version │ └── test_verilator_timing_c.py ├── test_verilator_timing_d/ │ ├── Makefile │ └── test_verilator_timing_d.py ├── test_verilog_access/ │ ├── Makefile │ └── test_verilog_access.py ├── test_verilog_include_dirs/ │ ├── Makefile │ ├── common/ │ │ ├── a.vh │ │ └── b.vh │ ├── const_stream/ │ │ └── c.vh │ ├── simple_and.sv │ └── test_verilog_include_dirs.py ├── test_vhdl_access/ │ ├── Makefile │ └── test_vhdl_access.py ├── test_vhdl_integer/ │ ├── Makefile │ ├── test_vhdl_integer.py │ └── vhdl_integer.vhdl ├── test_vhdl_libraries/ │ ├── Makefile │ ├── a.vhdl │ ├── b.vhdl │ └── test_ab.py ├── test_vhdl_libraries_multiple/ │ ├── Makefile │ ├── a.vhdl │ ├── b.vhdl │ ├── c.vhdl │ ├── d.vhdl │ ├── e.vhdl │ └── test_abcde.py └── test_xfail/ ├── Makefile └── test_xfail.py ================================================ FILE CONTENTS ================================================ ================================================ FILE: .backportrc.json ================================================ // Documentation at // https://github.com/sorenlouv/backport/blob/main/docs/config-file-options.md // Comments are allowed, trailing commas are not. { "repoOwner": "cocotb", "repoName": "cocotb", // Branches to backport to. "targetBranchChoices": [ "stable/1.9" ], // Use `backport-to:VERSION` as indication of the target branch. // Also update .github/workflows/backport.yml when changing the label here. 
"branchLabelMapping": { "^backport-to:(\\d+\\.\\d+)$": "stable/$1" }, // Labels assigned to the source PR after opening the backport PR(s). "sourcePRLabels": ["status:backport-created"], // In GitHub PR comments made by the bot suggest that users run // "npx backport", which automatically installs backport if necessary. "backportBinary": "npx backport", // Leave a note in the source PR if a backport failed. "publishStatusCommentOnFailure": true, // Title of the backport PR(s). "prTitle": "[{{targetBranch}}] Backport PR #{{sourcePullRequest.number}}: {{sourcePullRequest.title}}", // Labels added to the newly created backport PR(s). "targetPRLabels": ["type:backport"], // Default reviewers. "reviewers": ["cocotb/maintainers"] } ================================================ FILE: .clang-format ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause BasedOnStyle: Google IndentWidth: 4 AccessModifierOffset: -2 PointerAlignment: Right ================================================ FILE: .codecov.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause # Codecov configuration codecov: notify: # Codecov claims [1] to wait until all CI runs have completed, but that # doesn't work for our CI setup, causing early reports to come in which # indicate a drop in coverage. These reports are later updated as more # reports come in. But by that time the first issue comment has already # been made and an email has been sent. Prevent that by explicitly # specifying the number of builds that need to be uploaded to codecov # [2]. # # Keep this number in sync with the CI configuration! # # The number should be the same as the total number of tests belonging # to "ci-free" and "ci-licensed". 
# # [1] https://docs.codecov.io/docs/merging-reports#how-does-codecov-know-when-to-send-notifications # [2] https://docs.codecov.io/docs/notifications#preventing-notifications-until-after-n-builds after_n_builds: 24 coverage: status: project: default: # Report a CI failure if coverage drops by more than 1 percent. threshold: 1% ================================================ FILE: .devcontainer/Dockerfile ================================================ ARG VERILATOR_VERSION=5.044 ARG UV_VERSION=0.10.2 # Container images used only to copy binaries out of it (see below). FROM verilator/verilator:v${VERILATOR_VERSION} AS verilator FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv # See https://github.com/devcontainers/images/tree/main/src/base-ubuntu/history # for a description of what's in this base image. FROM mcr.microsoft.com/devcontainers/base:2-ubuntu-24.04 RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install --no-install-recommends \ bear \ bison \ build-essential \ ccache \ clang \ clangd \ colordiff \ doxygen \ flex \ gdb \ gh \ ghdl \ git \ git-absorb \ gperf \ graphviz \ iverilog \ libenchant-2-dev \ lldb \ pre-commit \ python3-dev \ python3-pip \ python3-venv \ valgrind \ && apt-get clean && rm -rf /var/lib/apt/lists/* # Install UV from the official uv container image. COPY --from=uv /uv /uvx /usr/local/bin/ # Install Verilator from official Verilator container image. COPY --from=verilator /usr/local/share/verilator /usr/local/share/verilator COPY --from=verilator /usr/local/bin/verilator* /usr/local/bin/ COPY --from=verilator /usr/local/share/man /usr/local/share/man COPY --from=verilator /usr/local/share/pkgconfig/verilator.pc /usr/local/share/pkgconfig # Install NVC from an upstream Debian package. # (Use apt instead of dpkg to resolve dependencies automatically.) 
ARG NVC_VERSION=1.18.2 ARG NVC_SHA256=bb6f19a84a398e13c56649996262db63ed9787e19dd0ad894774e132e170b785 RUN mkdir -p /tmp/nvc-install \ && curl -SLo /tmp/nvc-install/nvc.deb "https://github.com/nickg/nvc/releases/download/r${NVC_VERSION}/nvc_${NVC_VERSION}-1_amd64_ubuntu-24.04.deb" \ && echo "${NVC_SHA256} /tmp/nvc-install/nvc.deb" | sha256sum --check \ && apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt -y install --no-install-recommends /tmp/nvc-install/nvc.deb \ && apt-get clean && rm -rf /var/lib/apt/lists/* \ && rm -rf /tmp/nvc-install ================================================ FILE: .devcontainer/README.md ================================================ # Cocotb Development Environment in a Dev Container The Dev Container provides a ready-to-code development environment for cocotb on Windows, Mac, or Linux. Dev Containers combine VS Code with (Docker) containers and configuration within the cocotb repository. By default, Dev Containers cannot access files on your local machine; if you want to use proprietary simulators installed on your machine, you might be better off with a development environment on your machine itself. ## What's included in the Dev Container? * Ubuntu 24.04 * Compilers: GCC and clang * Open-source simulators: GHDL, Icarus Verilog, Verilator, NVC * Productivity tools: GDB, LLDB, Valgrind, [Bear](https://github.com/rizsotto/Bear) ## Getting started You need VS Code and the Dev Containers extension to run the Dev Container on your local machine. Follow the [Dev Containers Getting Started documentation](https://code.visualstudio.com/docs/devcontainers/containers#_getting-started) documentation to install all required tools. Then start the Dev Container: * Open VS Code * Open the command palette (F1) * Type `Dev Containers: Clone Repository in Container Volume...` and press ENTER. * Enter the repository URL `https://github.com/cocotb/cocotb` and press ENTER again. 
* Wait for a short moment until the Dev Container is ready to be used. Note: On Linux you can alternatively clone the Git repository on your local machine and open the folder in a container. We don't recommend that on Windows or Mac to get good filesystem performance ([learn more](https://code.visualstudio.com/remote/advancedcontainers/improve-performance)). After the Dev Container startup completed **open a new terminal** to run a first cocotb test. Note: *Do not* reuse the `Welcome to Codespaces` terminal you might see -- it does not have an active Python venv and commands will fail. ``` cd examples/simple_dff make WAVES=1 SIM=icarus code sim_build/dff.fst ``` ================================================ FILE: .devcontainer/devcontainer.json ================================================ // VS Code Dev Container configuration file for a cocotb development environment. { "name": "cocotb development", "build": { "context": "..", "dockerfile": "Dockerfile" }, "postCreateCommand": ".devcontainer/post-create.sh", "postAttachCommand": "less .devcontainer/README.md", "customizations": { "vscode": { "extensions": [ "llvm-vs-code-extensions.vscode-clangd", "ms-python.python", "ms-python.vscode-pylance", "lramseyer.vaporview" ], "settings": { "tasks": { // See https://go.microsoft.com/fwlink/?LinkId=733558 // for the documentation about the tasks.json format "version": "2.0.0", "tasks": [ { "label": "Preview documentation", "type": "shell", // Run the docs_preview nox session with the virtual environment's Python // (if it exists). "command": "PATH=.venv/bin:$PATH nox -s docs_preview", "isBackground": true } ] }, "files.exclude": { "**/.venv": true } } } } } ================================================ FILE: .devcontainer/post-create.sh ================================================ #!/bin/bash set -eo pipefail # Create and activate a virtual environment. python3 -m venv --prompt cocotb-devenv .venv . 
.venv/bin/activate # Install prerequisites and development tools. pre-commit install pip3 install nox pytest # Install cocotb in editable mode. bear -- pip3 install -e . ================================================ FILE: .git-blame-ignore-revs ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause # The following command will configure your local git repo to ignore these commits # when doing git-blame. # git config blame.ignoreRevsFile .git-blame-ignore-revs # clang-format 78e69fa428477b73808d08aec0e6702a924497f8 # black and isort 720b0e1071d0c720e61fa50fcb1813fd99198f20 # ruff eb254d7581b64e5384ee87f49d4363d72be6e20a 785e0896930039023db3c2e2be47bed5375326c5 0f943a324a997bb31dcd55831b51b393d5435965 ================================================ FILE: .github/PULL_REQUEST_TEMPLATE.md ================================================ ================================================ FILE: .github/dependabot.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" cooldown: default-days: 7 - package-ecosystem: "uv" directory: "/" schedule: interval: "weekly" cooldown: default-days: 7 versioning-strategy: lockfile-only groups: updates: applies-to: version-updates patterns: - "*" update-types: - "minor" - "patch" ================================================ FILE: .github/generate-envs.py ================================================ #!/usr/bin/env python3 # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause """Generate a list of test environments. 
Each environment must contain the following fields: - lang: The TOPLEVEL_LANG of the test. Must be one of "verilog" or "vhdl". - sim: The SIM of the test. Must be one of "icarus", "ghdl", "nvc", "verilator", "riviera", "questa", "xcelium", or "vcs". - sim-version: The version of the simulator to use. Valid values depend upon the simulator and build recipe. - os: The OS to operate on. Must be a valid value for the "jobs.<job_id>.runs-on" field for Github Actions. - python-version: The Python version to test with. Must be a valid value for the "python-version" field of the "actions/setup-python" Github Action. - group: The group to run the test in. One of "ci-free", "ci-licensed", "experimental", or "extended". See below note. Optional fields: - self-hosted: True if test needs to be run on a self-hosted Github Action runner. Default: False. - cc: C compiler and linker to use. Default: gcc. - cxx: C++ compiler and linker to use. Default: g++. - extra-name: Additional tag prepended to computed name for test. Default: . What tests belong in what groups: - ci-free: The most recent stable release of a given free simulator, all supported versions of Python, and all supported operating systems. Run on all PRs and master pushes. - ci-licensed: The most recent stable release of a given licensed simulator. Run on all PRs and master pushes in the cocotb repo, but are skipped in forks. - experimental: Development HEAD for each simulator, any under-development version of Python, and under-development simulator. Run weekly. - extended: The minimum supported version of a simulator, and a smattering of released simulator versions between the minimum and most recent. Run weekly. Ideally, whenever a new version of a simulator is released, a new test should be added for that simulator. 
The current test in the "ci-free"/"ci-licensed" group should be moved to "extended", and the new version should be added to "ci-free"/"ci-licensed" and any changes in behavior recorded with expectations to make CI pass. """ from __future__ import annotations import argparse import json import sys ENVS = [ # Test different Python versions with package managed Icarus on Ubuntu { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "ubuntu-22.04", "python-version": "3.9", "group": "ci-free", }, { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "ubuntu-22.04", "python-version": "3.10", "group": "ci-free", }, { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "ubuntu-22.04", "python-version": "3.11", "group": "ci-free", }, { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "ubuntu-22.04", "python-version": "3.12", "group": "ci-free", }, { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "ubuntu-22.04", "python-version": "3.13", "group": "ci-free", }, { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "ubuntu-22.04", "python-version": "3.14", "group": "ci-free", }, # { # "lang": "vhdl", # "sim": "nvc", # "sim-version": "r1.17.1", # "os": "ubuntu-22.04", # "python-version": "3.15", # "group": "experimental", # }, # Test Icarus on Ubuntu { "lang": "verilog", "sim": "icarus", "sim-version": "v11_0", # Minimum supported version "os": "ubuntu-22.04", "python-version": "3.9", "group": "extended", }, { "lang": "verilog", "sim": "icarus", "sim-version": "v13_0", # The latest release version. "os": "ubuntu-22.04", "python-version": "3.9", "group": "ci-free", }, { "lang": "verilog", "sim": "icarus", "sim-version": "master", "os": "ubuntu-22.04", "python-version": "3.9", "group": "experimental", }, # Test GHDL on Ubuntu { "lang": "vhdl", "sim": "ghdl", "sim-version": "v2.0.0", # GHDL 2.0 is the minimum supported version. 
"os": "ubuntu-22.04", "python-version": "3.9", "group": "extended", }, { "lang": "vhdl", "sim": "ghdl", "sim-version": "v5.1.1", # The latest release version. "os": "ubuntu-22.04", "python-version": "3.9", "group": "ci-free", }, { "lang": "vhdl", "sim": "ghdl", "sim-version": "master", "os": "ubuntu-22.04", "python-version": "3.9", "group": "experimental", }, # Testing latest release is covered by the Python version tests { "lang": "vhdl", "sim": "nvc", "sim-version": "master", "os": "ubuntu-22.04", "python-version": "3.9", "group": "experimental", }, # Test Verilator on Ubuntu { "lang": "verilog", "sim": "verilator", "sim-version": "v5.046", # Latest release version. "os": "ubuntu-22.04", "python-version": "3.10", "group": "ci-free", }, { "lang": "verilog", "sim": "verilator", "sim-version": "master", "os": "ubuntu-22.04", "python-version": "3.10", "group": "experimental", }, # Test other OSes # Icarus homebrew (ARM64) { "lang": "verilog", "sim": "icarus", "sim-version": "homebrew-stable", "os": "macos-14", "python-version": "3.9", "group": "ci-free", }, # Icarus homebrew (ARM64) (HEAD/master) { "lang": "verilog", "sim": "icarus", "sim-version": "homebrew-HEAD", "os": "macos-14", "python-version": "3.9", "group": "experimental", }, # Verilator macOS (ARM64) HEAD { "lang": "verilog", "sim": "verilator", "sim-version": "master", "os": "macos-14", "python-version": "3.9", "group": "experimental", }, # Verilator macOS (ARM64) latest release { "lang": "verilog", "sim": "verilator", "sim-version": "v5.046", "os": "macos-14", "python-version": "3.9", "group": "ci-free", }, # Icarus homebrew (x86) { "lang": "verilog", "sim": "icarus", "sim-version": "homebrew-stable", "os": "macos-15-intel", "python-version": "3.9", "group": "ci-free", }, # Icarus windows from source { "lang": "verilog", "sim": "icarus", "sim-version": "v13_0", "os": "windows-latest", "python-version": "3.11", "toolchain": "mingw", "extra-name": "mingw", "group": "ci-free", }, # use msvc instead of mingw 
{ "lang": "verilog", "sim": "icarus", "sim-version": "v13_0", "os": "windows-latest", "python-version": "3.11", "toolchain": "msvc", "extra-name": "msvc", "group": "ci-free", }, # NVC on windows { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "windows-latest", "python-version": "3.11", "group": "ci-free", }, # Other # use clang instead of gcc { "lang": "vhdl", "sim": "nvc", "sim-version": "r1.19.2", "os": "ubuntu-22.04", "python-version": "3.9", "cxx": "clang++", "cc": "clang", "extra-name": "clang", "group": "ci-free", }, # Test Siemens Questa on Ubuntu { "lang": "verilog", "sim": "questa", "sim-version": "siemens/questa/2025.2", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, { "lang": "vhdl and fli", "sim": "questa", "sim-version": "siemens/questa/2025.2", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, { "lang": "vhdl and vhpi", "sim": "questa", "sim-version": "siemens/questa/2025.2", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, # Test Aldec Riviera-PRO on Ubuntu { "lang": "verilog", "sim": "riviera", "sim-version": "aldec/rivierapro/2025.10", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, { "lang": "vhdl", "sim": "riviera", "sim-version": "aldec/rivierapro/2025.10", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, # Test Cadence Xcelium on Ubuntu { "lang": "verilog", "sim": "xcelium", "sim-version": "cadence/xcelium/2509", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, { "lang": "vhdl", "sim": "xcelium", "sim-version": "cadence/xcelium/2509", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, # Test Synopsys VCS on Ubuntu { "lang": "verilog", "sim": "vcs", "sim-version": "synopsys/vcs/X-2025.06", "os": "ubuntu-22.04", 
"self-hosted": True, "python-version": "3.9", "group": "ci-licensed", }, { "lang": "vhdl", "sim": "vcs", "sim-version": "synopsys/vcs/X-2025.06", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "experimental", }, ] ghdl_versions = ("v3.0.0", "v4.1.0") for version in ghdl_versions: ENVS += [ { "lang": "vhdl", "sim": "ghdl", "sim-version": version, "os": "ubuntu-22.04", "python-version": "3.9", "group": "extended", }, ] icarus_versions = ("v12_0",) for version in icarus_versions: ENVS += [ { "lang": "verilog", "sim": "icarus", "sim-version": version, "os": "ubuntu-22.04", "python-version": "3.9", "group": "extended", }, ] verilator_versions = ("v5.036", "v5.038", "v5.040", "v5.042", "v5.044") for version in verilator_versions: ENVS += [ { "lang": "verilog", "sim": "verilator", "sim-version": version, "os": "ubuntu-22.04", "python-version": "3.9", "group": "extended", }, ] nvc_versions = ( "r1.11.0", "r1.12.2", "r1.13.3", "r1.14.2", "r1.15.2", "r1.16.0", # First version with --preserve-case "r1.17.1", "r1.18.2", ) for version in nvc_versions: ENVS += [ { "lang": "vhdl", "sim": "nvc", "sim-version": version, "os": "ubuntu-22.04", "python-version": "3.9", "group": "extended", }, ] # Questa: test more versions as part of the extended tests. 
questa_versions_novhpi = ("2021.2", "2021.3", "2021.4", "2022.1", "2022.2") questa_versions_vhpi = ( "2022.3", "2022.4", "2023.1", "2023.2", "2023.4", "2024.1", "2024.2", ) for version in questa_versions_novhpi + questa_versions_vhpi: ENVS += [ { "lang": "verilog", "sim": "questa", "sim-version": f"siemens/questa/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, { "lang": "vhdl and fli", "sim": "questa", "sim-version": f"siemens/questa/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, ] for version in questa_versions_vhpi: ENVS += [ { "lang": "vhdl and vhpi", "sim": "questa", "sim-version": f"siemens/questa/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, ] # Riviera-PRO: test more versions as part of the extended tests. riviera_versions = ( "2019.10", "2020.04", "2020.10", "2021.04", "2021.10", "2022.04", "2023.10", "2024.04", "2024.10", "2025.04", ) for version in riviera_versions: ENVS += [ { "lang": "verilog", "sim": "riviera", "sim-version": f"aldec/rivierapro/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, { "lang": "vhdl", "sim": "riviera", "sim-version": f"aldec/rivierapro/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, ] # Xcelium: test more versions as part of the extended tests. xcelium_versions = ("2309", "2403", "2503") for version in xcelium_versions: ENVS += [ { "lang": "verilog", "sim": "xcelium", "sim-version": f"cadence/xcelium/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, { "lang": "vhdl", "sim": "xcelium", "sim-version": f"cadence/xcelium/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, ] # VCS: test more versions as part of the extended tests. 
vcs_versions = ("W-2024.09",) for version in vcs_versions: ENVS += [ { "lang": "verilog", "sim": "vcs", "sim-version": f"synopsys/vcs/{version}", "os": "ubuntu-22.04", "self-hosted": True, "python-version": "3.9", "group": "extended", }, # Don't run extended tests for VCS/VHDL yet until we have a version that # works. ] def append_str_val(listref, my_list, key) -> None: if key not in my_list: return listref.append(str(my_list[key])) def main() -> int: parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("--group") parser.add_argument("--output-format", choices=("gha", "json"), default="json") parser.add_argument( "--gha-output-file", type=argparse.FileType("a", encoding="utf-8"), help="The $GITHUB_OUTPUT file.", ) args = parser.parse_args() if args.group is not None and args.group != "": selected_envs = [t for t in ENVS if "group" in t and t["group"] == args.group] else: # Return all tasks if no group is selected. selected_envs = ENVS for env in selected_envs: # The "runs-on" job attribute is a string if we're using the GitHub- # provided hosted runners, or an array with special keys if we're # using self-hosted runners. if "self-hosted" in env and env["self-hosted"] and "runs-on" not in env: env["runs-on"] = ["self-hosted", f"cocotb-private-{env['os']}"] else: env["runs-on"] = env["os"] # Assemble the human-readable name of the job. name_parts = [] append_str_val(name_parts, env, "extra-name") append_str_val(name_parts, env, "sim") if "/" in env["sim-version"]: # Shorten versions like 'siemens/questa/2023.2' to '2023.2'. name_parts.append(env["sim-version"].split("/")[-1]) else: name_parts.append(env["sim-version"]) append_str_val(name_parts, env, "lang") append_str_val(name_parts, env, "os") append_str_val(name_parts, env, "python-version") if env.get("may-fail") is not None: name_parts.append("May fail") env["name"] = "|".join(name_parts) if args.output_format == "gha": # Output for GitHub Actions (GHA). 
Appends the configuration to # the file named in the "--gha-output-file" argument. assert args.gha_output_file is not None # The generated JSON output may not contain newlines to be parsed by GHA print(f"envs={json.dumps(selected_envs)}", file=args.gha_output_file) # Print the the selected environments for easier debugging. print("Generated the following test configurations:") print(json.dumps(selected_envs, indent=2)) elif args.output_format == "json": print(json.dumps(selected_envs, indent=2)) else: assert False return 0 if __name__ == "__main__": sys.exit(main()) ================================================ FILE: .github/issue_template.md ================================================ ================================================ FILE: .github/workflows/backport.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause # A workflow to automatically backport pull requests to a stable branch. # # This workflow uses the Backport CLI (https://github.com/sorenlouv/backport) # under the hood, which is configured in `.backportrc.json` in the repository # root. # # See https://github.com/sorenlouv/backport-github-action for documentation # on the used action. name: Backport PRs to stable branches on: pull_request_target: # Run this workflow when a label on a PR is added, or if it's closed. types: ["labeled", "closed"] jobs: backport: name: Backport PR if: github.event.pull_request.merged == true runs-on: ubuntu-latest steps: # Run the backport action only on PRs with one of the `backport-to:` # labels applied. # # Also update `branchLabelMapping` in `.backportrc.json` when changing the # label here. # # Implementation note: GitHub's contains() is matching the full string # when operating on an array (and startsWith() does not operate on arrays # at all). Do the label matching with jq instead. 
- name: Check for backport labels id: check_labels run: |- labels='${{ toJSON(github.event.pull_request.labels.*.name) }}' matched=$(echo $labels | jq '.|map(select(startswith("backport-to:"))) | length') echo "matched=$matched" echo "matched=$matched" >> $GITHUB_OUTPUT - name: Backport Action if: fromJSON(steps.check_labels.outputs.matched) > 0 uses: sorenlouv/backport-github-action@9460b7102fea25466026ce806c9ebf873ac48721 # v11.0.0 with: # GITHUB_TOKEN is available by default, but the powers it has are # configurable. Follow the GitHub documentation at # https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository # to "Allow GitHub Actions to create and approve pull requests". github_token: ${{ secrets.GITHUB_TOKEN }} - name: Info log if: ${{ fromJSON(steps.check_labels.outputs.matched) > 0 && success() }} run: cat ~/.backport/backport.info.log - name: Debug log if: ${{ fromJSON(steps.check_labels.outputs.matched) > 0 && failure() }} run: cat ~/.backport/backport.debug.log ================================================ FILE: .github/workflows/benchmark.yml ================================================ name: Performance Benchmark # adapted from https://github.com/benchmark-action/github-action-benchmark#charts-on-github-pages-1 concurrency: group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }} cancel-in-progress: ${{ !(contains(github.ref, 'release/') || contains(github.ref, 'master')) }} on: push: branches: - master pull_request: branches: - master jobs: tests: if: github.repository == 'cocotb/cocotb' name: Python ${{matrix.python-version}} runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: include: - python-version: 3.9 steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Set up Python ${{matrix.python-version}} 
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: ${{matrix.python-version}} - name: Install Icarus Verilog run: | sudo apt-get install -y --no-install-recommends iverilog - name: Set up NVC (Ubuntu) run: | sudo apt-get install -y --no-install-recommends llvm-dev libdw-dev flex libzstd-dev pkg-config git clone https://github.com/nickg/nvc.git cd nvc git reset --hard r1.16.0 ./autogen.sh mkdir build cd build ../configure make -j $(nproc) sudo make install - name: Run benchmark run: | pip install pytest pytest-benchmark pip install . pytest -c /dev/null tests/benchmarks --benchmark-json output.json # Pushing the benchmark requires elevated permissions to the # cocotb/cocotb-benchmark-results repository, which we only grant for # master builds, not for PR builds. - name: Generate a token to access cocotb/cocotb-benchmark-results if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }} id: generate_token uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0 with: app-id: ${{ secrets.COCOTB_CI_REPOACCESS_APP_ID }} private-key: ${{ secrets.COCOTB_CI_REPOACCESS_APP_PRIVATE_KEY }} owner: ${{ github.repository_owner }} repositories: cocotb-benchmark-results - name: Store benchmark result if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }} uses: benchmark-action/github-action-benchmark@a60cea5bc7b49e15c1f58f411161f99e0df48372 # v1.22.0 continue-on-error: true with: tool: 'pytest' output-file-path: output.json alert-threshold: '120%' fail-on-alert: true github-token: ${{ steps.generate_token.outputs.token }} auto-push: true gh-repository: 'github.com/cocotb/cocotb-benchmark-results' ================================================ FILE: .github/workflows/build-test-dev.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. 
# SPDX-License-Identifier: BSD-3-Clause name: CI concurrency: group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }} cancel-in-progress: ${{ !(contains(github.ref, 'release/') || contains(github.ref, 'master')) }} on: # Run this workflow on every push to the master branch, or a stable branch. push: branches: - master - "stable/**" pull_request: branches: - master - 'stable/**' paths-ignore: # Skip running tests for changes only in: # Documentation - 'docs/**' # Dot-files not related to running tests - '.devcontainer/**' - '.backportrc.json' - '.clang-format' - '.git-blame-ignore-revs' - '.gitignore' - '.pre-commit-config.yaml' - '.readthedocs.yml' # Information files - 'LICENSE' - 'README.md' - 'CONTRIBUTING.md' - 'MANIFEST.in' # Github files that aren't related to testing - '.github/issue_template.md' - '.github/PULL_REQUEST_TEMPLATE.md' - '.github/workflows/backport.yml' - '.github/workflows/benchmark.yml' - '.github/workflows/ecosystem-compat.yml' - '.github/workflows/experimental.yml' - '.github/workflows/extended.yml' - '.github/workflows/stale.yml' jobs: test_dev: name: Regression Tests uses: ./.github/workflows/regression-tests.yml with: test_task: dev_test collect_coverage: true group: ci-free test_dev_licensed: if: github.repository == 'cocotb/cocotb' name: Regression Tests uses: ./.github/workflows/regression-tests.yml with: test_task: dev_test collect_coverage: true group: ci-licensed ================================================ FILE: .github/workflows/build-test-release.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause # # DO NOT RENAME THIS FILE! # PyPi uploads use OIDC, aka Trusted Publishing, to avoid the need for API keys. # https://pypi.org/manage/project/cocotb/settings/publishing/ is configured to # allow uploads from the cocotb GitHub project and this exact file name. 
# name: Release concurrency: group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }} cancel-in-progress: ${{ !(contains(github.ref, 'release/') || contains(github.ref, 'master')) }} on: # Run this workflow on every push to master or to a stable branch. push: branches: - master - "stable/**" tags: - 'v*' jobs: build_release: name: Build distribution on ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: fail-fast: false # Keep going even if one matrix build fails. matrix: os: - ubuntu-22.04 - windows-2022 - macos-15-intel # x86_64 - macos-14 # ARM64 steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: "3.12" - name: Install uv uses: astral-sh/setup-uv@v7 - name: Install nox run: python3 -m pip install nox nox-uv # Use the cibuildwheel configuration inside nox, instead of the # cibuildwheel GitHub Action, to make the process easy to reproduce # locally. - name: Build cocotb release run: | nox -s release_clean nox -s release_build - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 with: name: cocotb-dist-${{ matrix.os }} path: | dist/*.whl dist/*.tar.gz # This tests both the sdist and wheel builds in separate venvs back to back with just the pytest tests. test_release: name: Regression Tests needs: build_release uses: ./.github/workflows/regression-tests.yml with: test_task: release_test download_artifacts: true group: ci-free test_release_licensed: name: Regression Tests needs: build_release uses: ./.github/workflows/regression-tests.yml with: test_task: release_test download_artifacts: true group: ci-licensed deploy_pypi: name: Deploy to pypi.org needs: - test_release - test_release_licensed permissions: id-token: write runs-on: ubuntu-22.04 # Only upload tagged releases. 
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: "3.12" - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 with: path: dist pattern: cocotb-dist-* merge-multiple: true - name: Publish distribution to PyPI uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0 # Authentication to PyPi is done through OIDC ("Trusted Publishing"). ================================================ FILE: .github/workflows/ecosystem-compat.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause # Tests to ensure that projects depending on cocotb continue to work with the # latest development version of cocotb. # # Generally, we test the development version of cocotb against supported, # released versions of the other projects. (It is expected that the projects # themselves test their in-development code against the released version of # cocotb.) name: Ecosystem compatibility tests on: # Run daily at midnight (UTC). schedule: - cron: '0 0 * * *' # Allow triggering a CI run from the web UI. 
workflow_dispatch: jobs: cocotb-coverage: if: github.repository == 'cocotb/cocotb' name: Test cocotb-coverage runs-on: ubuntu-24.04 strategy: matrix: cocotb-coverage-version: [ "2.0" ] steps: - name: Set up Python uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: "3.11" - name: Install Icarus Verilog run: sudo apt-get install -y --no-install-recommends iverilog - name: Checkout cocotb-coverage repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: repository: mciepluc/cocotb-coverage path: cocotb-coverage ref: v${{ matrix.cocotb-coverage-version }} - name: Install the release version of cocotb-coverage run: pip install ./cocotb-coverage - name: Install the development version of cocotb run: pip install git+https://github.com/cocotb/cocotb.git - name: Run tests if: ${{ matrix.cocotb-coverage-version }} == '2.0' env: SIM: icarus TOPLEVEL_LANG: verilog run: | pip install pytest cocotb-bus numpy cd cocotb-coverage make -k -C tests make -C examples/fifo/tests make -C examples/pkt_switch/tests pyuvm: if: github.repository == 'cocotb/cocotb' name: Test pyuvm runs-on: ubuntu-24.04 strategy: matrix: pyuvm-version: [ "4.0.1" ] steps: - name: Set up Python uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: "3.11" - name: Install Icarus Verilog run: sudo apt-get install -y --no-install-recommends iverilog - name: Install NVC uses: nickg/setup-nvc@v1 with: version: r1.19.3 - name: Checkout pyuvm repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: repository: pyuvm/pyuvm path: pyuvm ref: ${{ matrix.pyuvm-version }} - name: Install pyuvm run: pip install ./pyuvm - name: Install the development version of cocotb run: pip install git+https://github.com/cocotb/cocotb.git - name: Run tests if: ${{ matrix.pyuvm-version }} == '4.0.1' working-directory: pyuvm env: VERILOG_SIM: icarus VHDL_SIM: nvc run: | pip install 
pytest pytest make cocotb_tests forastero: if: github.repository == 'cocotb/cocotb' name: Test Forastero runs-on: ubuntu-24.04 strategy: matrix: forastero-version: [ "1.2.1" ] steps: - name: Set up Python uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: "3.13" - name: Install Icarus Verilog run: sudo apt-get install -y --no-install-recommends iverilog - name: Checkout Forastero repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: repository: intuity/forastero path: forastero ref: ${{ matrix.forastero-version }} - name: Install the release version of Forastero run: pip install ./forastero - name: Install the development version of cocotb run: pip install git+https://github.com/cocotb/cocotb.git - name: Run tests working-directory: forastero env: SIM: icarus TOPLEVEL_LANG: verilog run: | make -C examples/arbiter_strict make -C examples/arbiter_window ================================================ FILE: .github/workflows/experimental.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause # A workflow where the tool is the moving target, or the tool is known to be # not fully supported by cocotb, but we still want to see how its support # evolves over time. name: Test experimental tool versions on: # Run every Monday at 2am (UTC). schedule: - cron: '0 2 * * 1' # Allow triggering a CI run from the web UI. workflow_dispatch: jobs: test_dev: if: github.repository == 'cocotb/cocotb' name: Regression Tests uses: ./.github/workflows/regression-tests.yml with: test_task: dev_test group: experimental ================================================ FILE: .github/workflows/extended.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. 
# SPDX-License-Identifier: BSD-3-Clause # Run tests against an extended set of simulator (versions). name: Test extended tool versions on: # Run every Sunday at 2am (UTC). schedule: - cron: '0 2 * * 0' # Allow triggering a CI run from the web UI. workflow_dispatch: jobs: test_dev: if: github.repository == 'cocotb/cocotb' name: Regression Tests uses: ./.github/workflows/regression-tests.yml with: test_task: dev_test group: extended max_parallel: 5 ================================================ FILE: .github/workflows/regression-tests.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause name: Regression Tests on: workflow_call: inputs: test_task: required: true type: string default: dev_test collect_coverage: required: false type: boolean default: false download_artifacts: required: false type: boolean default: false group: required: false type: string default: "ci-free" description: Group of environments to run the tests against. Leave empty to run them all. 
max_parallel: required: false type: number default: 0 description: Maximum number of parallel matrix jobs setup_python: required: false type: string description: Which Github Action to setup Python jobs: generate_envs: runs-on: ubuntu-latest name: Generate a list of environments to run tests against steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - run: ./.github/generate-envs.py --output-format=gha --gha-output-file="$GITHUB_OUTPUT" --group="${{inputs.group}}" id: run_generate_script outputs: envs: ${{ steps.run_generate_script.outputs.envs }} tests: needs: generate_envs name: ${{matrix.name}} runs-on: ${{matrix.runs-on}} timeout-minutes: 60 env: SIM: ${{matrix.sim}} TOPLEVEL_LANG: ${{matrix.lang}} CXX: ${{matrix.cxx || 'g++'}} CC: ${{matrix.cc || 'gcc'}} OS: ${{matrix.os}} PYTHON_VERSION: ${{matrix.python-version}} strategy: fail-fast: false matrix: include: ${{ fromJson(needs.generate_envs.outputs.envs) }} max-parallel: ${{ inputs.max_parallel }} steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: # GitHub PR's create a merge commit, and Actions are run on that commit. # Codecov's uploader needs the previous commit (tip of PR branch) to associate coverage correctly. # A fetch depth of 2 provides enough information without fetching the entire history. fetch-depth: 2 - name: setup ccache uses: hendrikmuhs/ccache-action@33522472633dbd32578e909b315f5ee43ba878ce # v1.2.22 if: ${{ startsWith(matrix.os, 'ubuntu') }} with: key: ${{ matrix.os }}-${{matrix.cc || 'gcc'}} - name: setup ccache path if: ${{ startsWith(matrix.os, 'ubuntu') }} run: | echo "/usr/lib/ccache:/usr/local/opt/ccache/libexec" >> $GITHUB_PATH # Use the ccache cache for all compilers. - name: Update package index (ubuntu) if: startsWith(matrix.os, 'ubuntu') run: sudo apt-get update # Download distribution artifacts (if any). 
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 if: ${{ inputs.download_artifacts }} with: path: dist pattern: cocotb-dist-* merge-multiple: true # Install Python - name: Set up Python ${{matrix.python-version}} (setup-python) if: matrix.setup_python == '' uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: ${{matrix.python-version}} allow-prereleases: true - name: Install msys2 (Windows) if: startsWith(matrix.os, 'windows') && matrix.sim == 'icarus' uses: msys2/setup-msys2@v2 with: msystem: MINGW64 update: true install: > base-devel gperf mingw-w64-x86_64-toolchain # Install - name: Install XML-dependencies for Python 3.14 if: startsWith(matrix.python-version, '3.14') && startsWith(matrix.os, 'ubuntu') run: sudo apt-get install -y --no-install-recommends libxml2-dev libxslt-dev # Run tests that don't need a simulator. - name: Install uv uses: astral-sh/setup-uv@v7 - name: Install Python testing dependencies run: | pip install nox nox-uv # Install Icarus - name: Set up Icarus (Ubuntu - apt) if: startsWith(matrix.os, 'ubuntu') && matrix.sim == 'icarus' && matrix.sim-version == 'apt' run: | sudo apt-get install -y --no-install-recommends iverilog - name: Set up Icarus (Ubuntu - source) if: startsWith(matrix.os, 'ubuntu') && matrix.sim == 'icarus' && matrix.sim-version != 'apt' run: | sudo apt-get install -y --no-install-recommends g++ gperf flex bison make autoconf git clone https://github.com/steveicarus/iverilog.git cd iverilog git reset --hard ${{matrix.sim-version}} bash ./autoconf.sh # Icarus 11.0 fails to configure under Ubuntu 22.04 without explicitly # telling it where to find the preprocessor. CXXCPP=/usr/bin/cpp bash ./configure make -j $(nproc) sudo make install - name: Set up Icarus (Windows - source - pt. 
1) if: startsWith(matrix.os, 'windows') && matrix.sim == 'icarus' run: | git config --global core.autocrlf input git clone https://github.com/steveicarus/iverilog.git cd iverilog git reset --hard ${{matrix.sim-version}} - name: Set up Icarus (Windows - source - pt. 2) if: startsWith(matrix.os, 'windows') && matrix.sim == 'icarus' timeout-minutes: 10 shell: msys2 {0} env: MINGW_ARCH: MINGW64 run: | cd iverilog cd msys2 makepkg-mingw --noconfirm --noprogressbar -sCLf - name: Set up Icarus (Windows - source - pt. 3) if: startsWith(matrix.os, 'windows') && matrix.sim == 'icarus' timeout-minutes: 10 shell: msys2 {0} env: MINGW_ARCH: MINGW64 run: | pacman -U --noconfirm iverilog/msys2/*.zst echo "$(dirname $(cygpath -m $(which iverilog)))" >> $GITHUB_PATH - name: Set up Icarus (MacOS - homebrew --HEAD) if: startsWith(matrix.os, 'macos') && matrix.sim == 'icarus' && matrix.sim-version == 'homebrew-HEAD' run: | brew install icarus-verilog --HEAD - name: Set up Icarus (MacOS - homebrew) if: startsWith(matrix.os, 'macos') && matrix.sim == 'icarus' && matrix.sim-version == 'homebrew-stable' run: | brew install icarus-verilog # Install GHDL - name: Set up GHDL (Ubuntu) if: startsWith(matrix.os, 'ubuntu') && matrix.sim == 'ghdl' run: | sudo apt-get install -y --no-install-recommends gnat git clone https://github.com/ghdl/ghdl.git cd ghdl git reset --hard ${{matrix.sim-version}} mkdir build cd build ../configure make -j $(nproc) sudo make install # Install NVC - name: Set up NVC (Ubuntu, release) if: startsWith(matrix.os, 'ubuntu') && matrix.sim == 'nvc' && startsWith(matrix.sim-version, 'r') && matrix.cc != 'clang' uses: nickg/setup-nvc@v1 with: version: ${{matrix.sim-version}} - name: Set up NVC (Windows, release) if: startsWith(matrix.os, 'windows') && matrix.sim == 'nvc' uses: nickg/setup-nvc@v1 with: version: ${{matrix.sim-version}} - name: Set up NVC (Ubuntu, source) if: startsWith(matrix.os, 'ubuntu') && matrix.sim == 'nvc' && (!startsWith(matrix.sim-version, 'r') || 
matrix.cc == 'clang') run: | sudo apt-get install -y --no-install-recommends llvm-dev libdw-dev flex git clone --depth=1 --no-single-branch https://github.com/nickg/nvc.git cd nvc git reset --hard ${{matrix.sim-version}} ./autogen.sh mkdir build cd build ../configure make -j $(nproc) sudo make install # Install Verilator (Linux) - name: Set up Verilator (Ubuntu - source) if: startsWith(matrix.os, 'ubuntu') && matrix.sim == 'verilator' run: | sudo apt-get install -y --no-install-recommends help2man make g++ perl python3 autoconf flex bison libfl2 libfl-dev zlib1g zlib1g-dev git clone https://github.com/verilator/verilator.git cd verilator git reset --hard ${{matrix.sim-version}} autoconf ./configure make -j $(nproc) sudo make install # Install Verilator (MacOS) - name: Set up Verilator (MacOS - source) if: startsWith(matrix.os, 'macos') && matrix.sim == 'verilator' run: | brew install autoconf help2man git clone https://github.com/verilator/verilator.git cd verilator git reset --hard ${{matrix.sim-version}} autoconf ./configure make -j $(sysctl -n hw.logicalcpu) sudo make install # Windows Testing - name: Test (Windows) if: startsWith(matrix.os, 'windows') id: windowstesting continue-on-error: ${{matrix.may-fail || false}} run: | nox -k "${{ inputs.test_task }} and ${{ matrix.sim }} and ${{ matrix.lang }}" env: COCOTB_ANSI_OUTPUT: 1 COCOTB_CI_SKIP_MAKE: 1 # Ubuntu / MacOS Testing - name: Install cocotb build dependencies (Ubuntu - g++) if: startsWith(matrix.os, 'ubuntu') && (!matrix.cxx || matrix.cxx == 'g++') run: | sudo apt-get install g++ - name: Install cocotb build dependencies (Ubuntu - clang++) if: startsWith(matrix.os, 'ubuntu') && matrix.cxx == 'clang++' run: | sudo apt-get install clang llvm - name: Install cocotb build dependencies (MacOS) if: startsWith(matrix.os, 'macos') run: | g++ --version - name: Test (Ubuntu, MacOS) id: unixtesting if: startsWith(matrix.os, 'ubuntu') || startsWith(matrix.os, 'macos') continue-on-error: ${{matrix.may-fail || false}} 
timeout-minutes: 40 run: | if [ "${{matrix.self-hosted}}" == "true" ]; then module load "${{ matrix.sim-version }}" fi nox -k "${{ inputs.test_task }} and ${{ matrix.sim }} and ${{ matrix.lang }}" env: COCOTB_ANSI_OUTPUT: 1 # codecov - name: Combine and report coverage if: inputs.collect_coverage && matrix.cxx != 'clang++' run: nox -s dev_coverage_report - name: Combine and report coverage (clang) if: inputs.collect_coverage && matrix.cxx == 'clang++' run: nox -s dev_coverage_report -- 'llvm-cov gcov' - name: Upload to codecov if: inputs.collect_coverage uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0 env: # https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954 CODECOV_TOKEN: 669f2048-851e-479e-a618-8fa64f3736cc with: files: .python_coverage.xml,.cpp_coverage.xml name: ${{ matrix.name }} env_vars: SIM,TOPLEVEL_LANG,CXX,OS,PYTHON_VERSION verbose: true ================================================ FILE: .github/workflows/stale.yml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause name: "Stale Questions" on: schedule: - cron: "00 02 * * *" jobs: stale: runs-on: ubuntu-latest steps: - uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0 with: repo-token: ${{secrets.GITHUB_TOKEN}} days-before-stale: 30 days-before-close: 7 stale-issue-message: > Has your question been resolved? If so please close this issue. If it has not been resolved, you may need to provide more information. If no more activity on this issue occurs in 7 days, it will be closed. stale-issue-label: "status:stale" stale-pr-message: > Are you still actively working on this pull request? You may have requested changes that need to be addressed. If the maintainers aren't being timely with a review, we apologize. Please bump this pull request to keep it alive. 
If no more activity on this pull request occurs in 7 days, it will be closed. stale-pr-label: "status:stale" any-of-labels: "type:question,status:close?,status:needs-info" operations-per-run: 30 ================================================ FILE: .gitignore ================================================ *.py[cod] # C extensions *.so *.dll # C objects *.o # Python and C++ code coverage .coverage .coverage.cocotb *.gcno *.gcda *.gcov *.xml # Packaging *.egg *.egg-info dist build eggs parts sdist obj develop-eggs .installed.cfg __pycache__ .tox .nox .eggs # Vim tmp files *.swp *~ # VSCode project files .vscode/ *.code-workspace # Emacs tmp files \#*\# \.\#* # Mergetool tmp files *.orig *.bak # Waveforms *.vcd *.fst *.fst.hier # Results results.xml combined_results.xml # Debuggers .gdb_history # VCS files *.tab sim_build ucli.key # Pytest .pytest_cache # Cadence Incisive/Xcelium *.elog irun.log xrun.log irun.key xrun.key irun.history xrun.history INCA_libs xcelium.d ncelab_*.err xmelab_*.err ncsim_*.err xmsim_*.err bpad_*.err .bpad/ .simvision/ waves.shm/ # Interface libraries built on setup src/cocotb/libs pip-wheel-metadata/ # Mentor Modelsim/Questa tests/test_cases/*/modelsim.ini tests/test_cases/*/transcript tests/test_cases/*/qwave.db tests/test_cases/*/design.bin examples/*/tests/modelsim.ini examples/*/tests/transcript examples/*/tests/qwave.db examples/*/tests/design.bin *.wlf qrun.log visualizer.log # Riviera tests/test_cases/*/library.cfg tests/test_cases/*/dataset.asdb tests/test_cases/*/compile examples/*/tests/library.cfg examples/*/tests/dataset.asdb examples/*/tests/compile # Tachyon DA CVC tests/test_cases/*/verilog.log examples/*/tests/verilog.log # DSim dsim* metrics.db # Build artifacts /dist/ # clangd compile_commands.json compile_commands.*.json .cache/ ================================================ FILE: .pre-commit-config.yaml ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD 
License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause exclude: "^.*/_vendor/" repos: # ruff with --fix should run before other formatting tools - repo: https://github.com/astral-sh/ruff-pre-commit rev: "v0.15.10" hooks: # Run the linter. - id: "ruff" args: - "--fix" - "--exit-non-zero-on-fix" # Run the formatter. - id: ruff-format - repo: "https://github.com/pre-commit/mirrors-clang-format" rev: "v22.1.3" hooks: - id: "clang-format" exclude: "^src/cocotb/share/include/(sv_vpi|vhpi|vpi)_user(_ext)?.h" types_or: [c, c++] - repo: "https://github.com/pre-commit/pre-commit-hooks" rev: "v6.0.0" hooks: - id: "trailing-whitespace" - id: "mixed-line-ending" args: - "--fix=lf" - id: "end-of-file-fixer" exclude: "^docs/source/diagrams/(svg|xml)/" - repo: https://github.com/henryiii/validate-pyproject-schema-store rev: "2026.04.11" hooks: - id: validate-pyproject files: pyproject.toml - repo: local hooks: - id: "git-diff" name: git diff entry: git diff --exit-code language: system pass_filenames: false always_run: true - repo: https://github.com/codespell-project/codespell rev: v2.4.2 hooks: - id: codespell additional_dependencies: - tomli - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.19.1 hooks: - id: mypy args: [--follow-imports=silent] additional_dependencies: - pytest - pluggy - coverage - nox - nox-uv files: ^(src/cocotb/|src/pygpi/|noxfile\.py|src/cocotb_tools/pytest/) - repo: https://github.com/astral-sh/uv-pre-commit rev: 0.11.6 hooks: - id: uv-lock ci: autofix_prs: false ================================================ FILE: .readthedocs.yml ================================================ # .readthedocs.yml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details version: 2 build: os: ubuntu-22.04 tools: python: "3.12" apt_packages: - graphviz jobs: create_environment: - asdf plugin add uv - asdf install uv latest - asdf global uv latest - 
UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv sync --all-extras --group docs install: - "true" sphinx: configuration: docs/source/conf.py ================================================ FILE: CONTRIBUTING.md ================================================ # cocotb Contribution Guidelines cocotb welcomes contributions from anyone! Please have a look at the [Development & Community](https://docs.cocotb.org/en/development/contributing.html) section of the cocotb documentation for an in-depth contribution guide. ================================================ FILE: LICENSE ================================================ Copyright cocotb contributors Copyright (c) 2013 Potential Ventures Ltd Copyright (c) 2013 SolarFlare Communications Inc All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ================================================ FILE: MANIFEST.in ================================================ recursive-include src/cocotb/share * recursive-include src/cocotb/_vendor * include README.md include LICENSE include cocotb_build_libs.py ================================================ FILE: Makefile ================================================ # Copyright cocotb contributors # Copyright (c) 2013 Potential Ventures Ltd # Copyright (c) 2013 SolarFlare Communications Inc # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause REPO_ROOT := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) .PHONY: all all: test .PHONY: clean clean: -@find . -name "obj" -exec rm -rf {} + -@find . -name "*.pyc" -delete -@find . 
-name "*results.xml" -delete $(MAKE) -C examples clean $(MAKE) -C tests clean .PHONY: do_tests do_tests:: $(MAKE) -C tests do_tests:: $(MAKE) -C examples # For Jenkins we use the exit code to detect compile errors or catastrophic # failures and the XML to track test results .PHONY: jenkins jenkins: do_tests python -m cocotb_tools.combine_results --repo-root $(REPO_ROOT) --suppress_rc --testsuites_name=cocotb_regression # By default want the exit code to indicate the test results .PHONY: test test: $(MAKE) do_tests; ret=$$?; python -m cocotb_tools.combine_results --repo-root $(REPO_ROOT) && exit $$ret COCOTB_MAKEFILES_DIR = $(realpath $(shell cocotb-config --makefiles)) AVAILABLE_SIMULATORS = $(patsubst .%,%,$(suffix $(wildcard $(COCOTB_MAKEFILES_DIR)/simulators/Makefile.*))) .PHONY: help help: @echo "" @echo "This cocotb makefile has the following targets" @echo "" @echo "all, test - run regression producing combined_results.xml" @echo " (return error code produced by sub-makes)" @echo "jenkins - run regression producing combined_results.xml" @echo " (return error code 1 if any failure was found)" @echo "clean - remove build directory and all simulation artefacts" @echo "" @echo "The default simulator is Icarus Verilog." @echo "To use another, set the environment variable SIM as below." @echo "Available simulators:" @for X in $(sort $(AVAILABLE_SIMULATORS)); do \ echo export SIM=$$X; \ done @echo "" ================================================ FILE: README.md ================================================ **cocotb** is a framework empowering users to write VHDL and Verilog testbenches in Python. 
[![Documentation Status](https://readthedocs.org/projects/cocotb/badge/?version=development)](https://docs.cocotb.org/en/stable/) [![CI](https://github.com/cocotb/cocotb/actions/workflows/build-test-dev.yml/badge.svg?branch=master)](https://github.com/cocotb/cocotb/actions/workflows/build-test-dev.yml) [![PyPI](https://img.shields.io/pypi/dm/cocotb.svg?label=PyPI%20downloads)](https://pypi.org/project/cocotb/) [![codecov](https://codecov.io/gh/cocotb/cocotb/branch/master/graph/badge.svg)](https://codecov.io/gh/cocotb/cocotb) * Check out the [tutorial](https://docs.cocotb.org/en/stable/quickstart.html) * Read the [docs](https://docs.cocotb.org/en/stable/) * Find more info in the [wiki](https://github.com/cocotb/cocotb/wiki) * Discover [useful extensions](https://github.com/cocotb/cocotb/wiki/Further-Resources#utility-libraries-and-frameworks) * Join the discussion in the [Gitter chat room](https://gitter.im/cocotb/Lobby) * [Ask a question](https://github.com/cocotb/cocotb/discussions) * [Raise a bug / request an enhancement](https://github.com/cocotb/cocotb/issues/new) ================================================ FILE: cocotb_build_libs.py ================================================ # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. 
# SPDX-License-Identifier: BSD-3-Clause

# NOTE(review): this file was recovered from a whitespace-mangled extraction.
# All indentation and line breaks below are reconstructed; several string
# literal templates (the SxS XML manifests and parts of the .rc snippets)
# appear to have had their markup stripped by the extraction. Verify this
# file against upstream cocotb_build_libs.py before building from it.

from __future__ import annotations

import distutils
import logging
import os
import subprocess
import sys
import sysconfig
import textwrap
from distutils.ccompiler import get_default_compiler
from distutils.file_util import copy_file
from setuptools import Extension
from setuptools.command.build_ext import build_ext as _build_ext

logger = logging.getLogger(__name__)

# Absolute path of the bundled "share" tree (C++ sources, headers, .def files).
cocotb_share_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), "src", "cocotb", "share")
)

# Warning flags common to all GCC/Clang compilations.
_base_warns = [
    "-Wall",
    "-Wextra",
    "-Wcast-qual",
    "-Wwrite-strings",
    "-Wconversion",
    # -Wno-missing-field-initializers is required on GCC 4.x to prevent a
    # spurious warning `error: missing initializer for member ...` when
    # compiling `PyTypeObject type = {};` in `simulatormodule.cpp`.
    # (See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=36750.) This flag can be
    # removed once we require later GCC versions.
    "-Wno-missing-field-initializers",
    "-Werror=shadow",
]
# C++-only warnings on top of the base set.
_ccx_warns = [*_base_warns, "-Wnon-virtual-dtor", "-Woverloaded-virtual"]

_extra_cxx_compile_args = [
    "-std=c++11",
    "-fvisibility=hidden",
    "-fvisibility-inlines-hidden",
    *_ccx_warns,
]
if os.name != "nt":
    _extra_cxx_compile_args += ["-flto"]

# MSVC gets strict standards conformance instead of the GCC flag set.
_extra_cxx_compile_args_msvc = ["/permissive-"]

# Make PRI* format macros available with C++11 compiler but older libc, e.g. on RHEL6.
_extra_defines = [("__STDC_FORMAT_MACROS", "")]


def create_sxs_assembly_manifest(
    name: str, filename: str, libraries: list[str], dependency_only=False
) -> str:
    """
    Create side-by-side (sxs) assembly manifest

    It contains dependencies to other assemblies (in our case the assemblies are
    equal to the other libraries).
    For more details see:
    - https://docs.microsoft.com/en-us/windows/win32/sbscs/assembly-manifests
    - https://docs.microsoft.com/en-us/windows/win32/sbscs/using-side-by-side-assemblies

    Args:
        name: The name of the assembly for which the manifest is generated, e.g. ``libgpi``.
        filename: The filename of the library, e.g. ``libgpi.dll``.
        libraries: A list of names of dependent manifests, e.g. ``["libgpi"]``.
        dependency_only: If true, emit only the dependency entries, without the
            assembly identity (used for the secondary runtime-library manifest).
    """
    architecture = "amd64" if sys.maxsize > 2**32 else "x86"
    dependencies = []

    for lib in libraries:
        dependencies.append(
            # NOTE(review): the XML body of this dedent template was stripped
            # by the extraction; the format placeholders consumed by
            # (lib, architecture) are missing here. Restore from upstream.
            textwrap.dedent(
                """\
                """
            )
            % (lib, architecture)
        )

    if not dependency_only:
        # Full manifest: assembly identity plus its dependency list.
        # NOTE(review): XML template content stripped by extraction; the
        # placeholders for name/architecture/filename are missing.
        manifest_body = textwrap.dedent(
            """\
            %s
            """
        ) % (
            name,
            architecture,
            filename,
            textwrap.indent("".join(dependencies), " ").strip(),
        )
    else:
        # Dependency-only fragment, meant to be embedded into another manifest.
        manifest_body = textwrap.dedent(
            """\
            %s
            """
        ) % (textwrap.indent("".join(dependencies), " ").strip())

    return manifest_body


def create_sxs_appconfig(filename):
    """
    Create side-by-side (sxs) application configuration file.

    The application configuration specifies additional search paths for manifests.
    For more details see:
    https://docs.microsoft.com/en-us/windows/win32/sbscs/application-configuration-files
    """
    # NOTE(review): the XML body of this template was stripped by the
    # extraction; restore from upstream.
    config_body = textwrap.dedent(
        """\
        """
    )

    dirpath = os.path.dirname(filename)
    os.makedirs(dirpath, exist_ok=True)
    # ".2.config" suffix — presumably the WinSxS private application
    # configuration naming scheme; verify against the MS docs linked above.
    with open(filename + ".2.config", "w", encoding="utf-8") as f:
        f.write(config_body)


def create_rc_file(rc_filename, name, filename, libraries, runtime_libraries):
    """
    Creates windows resource definition script to embed the side-by-side
    assembly manifest into the libraries.

    For more details see:
    https://docs.microsoft.com/en-us/windows/win32/menurc/about-resource-files
    """
    manifest = create_sxs_assembly_manifest(name, filename, libraries)

    # Escape double quotes and put every line between double quotes for embedding into rc file
    manifest = manifest.replace('"', '""')
    manifest = "\n".join([f'"{x}\\r\\n"' for x in manifest.splitlines()])

    # NOTE(review): the `#include` target below (angle-bracketed header name)
    # was stripped by the extraction; restore from upstream.
    rc_body = (
        textwrap.dedent(
            """\
            #pragma code_page(65001) // UTF-8
            #include

            LANGUAGE 0x00, 0x00

            ISOLATIONAWARE_MANIFEST_RESOURCE_ID RT_MANIFEST
            BEGIN
            %s
            END
            """
        )
        % manifest
    )

    if runtime_libraries is not None:
        # Secondary, dependency-only manifest (resource id 1000) that lists
        # the runtime libraries.
        manifest = create_sxs_assembly_manifest(
            name, filename, runtime_libraries, dependency_only=True
        )

        # Escape double quotes and put every line between double quotes for embedding into rc file
        manifest = manifest.replace('"', '""')
        manifest = "\n".join([f'"{x}\\r\\n"' for x in manifest.splitlines()])

        rc_body += (
            textwrap.dedent(
                """\
                1000 RT_MANIFEST
                BEGIN
                %s
                END
                """
            )
            % manifest
        )

    with open(rc_filename, "w", encoding="utf-8") as f:
        f.write(rc_body)


def _get_lib_ext_name():
    """Get name of default library file extension on given OS."""
    if os.name == "nt":
        ext_name = "dll"
    else:
        ext_name = "so"
    return ext_name


class build_ext(_build_ext):
    """Custom ``build_ext`` that builds cocotb's plain shared libraries
    (the per-simulator GPI interface libs) alongside the one real Python
    extension module (``cocotb.simulator``)."""

    def _uses_msvc(self):
        # True when the effective compiler is MSVC, whether requested
        # explicitly by name, chosen as the platform default, or already
        # instantiated as a compiler object.
        if self.compiler == "msvc":
            return True
        if self.compiler is None:
            return get_default_compiler() == "msvc"
        else:
            return getattr(self.compiler, "compiler_type", None) == "msvc"

    def run(self):
        # On Windows, write the SxS application config next to the
        # ``cocotb.simulator`` extension before building anything.
        if os.name == "nt":
            create_sxs_appconfig(
                self.get_ext_fullpath(os.path.join("cocotb", "simulator"))
            )

        super().run()

    def build_extensions(self):
        if os.name == "nt":
            if self._uses_msvc():
                # Initialize the compiler now so that compiler/linker flags are populated
                if not self.compiler.initialized:
                    self.compiler.initialize()
                # Setuptools defaults to activate automatic manifest generation for msvc,
                # disable it here as we manually generate it to also support mingw on windows
                for k, ldflags in self.compiler._ldflags.items():
                    self.compiler._ldflags[k] = [x for x in
                        ldflags if not x.startswith("/MANIFEST")
                    ] + ["/MANIFEST:NO"]
                # Raise the MSVC warning level from whatever default /W flag
                # setuptools chose to /W4.
                self.compiler.compile_options = [
                    x for x in self.compiler.compile_options if not x.startswith("/W")
                ] + ["/W4"]

            # Generate a .rc file (embedding the SxS manifest) for every
            # extension we build so Windows can resolve inter-library
            # dependencies.
            # NOTE(review): the nesting in this method was reconstructed from
            # a whitespace-mangled extraction — verify against upstream.
            ext_names = {os.path.split(ext.name)[-1] for ext in self.extensions}
            for ext in self.extensions:
                fullname = self.get_ext_fullname(ext.name)
                filename = self.get_ext_filename(fullname)
                name = os.path.split(fullname)[-1]
                filename = os.path.split(filename)[-1]
                # Keep only dependencies that are themselves cocotb-built libs.
                libraries = {"lib" + lib for lib in ext.libraries}.intersection(
                    ext_names
                )
                rc_filename = name + ".rc"
                # NOTE(review): runtime_libraries is always None here, so the
                # branch below is currently dead code.
                runtime_libraries = None

                # Strip lib prefix for msvc
                if self._uses_msvc():
                    name = name[3:] if name.startswith("lib") else name
                    libraries = {
                        (lib[3:] if lib.startswith("lib") else lib)
                        for lib in libraries
                    }
                    if runtime_libraries is not None:
                        runtime_libraries = {
                            (lib[3:] if lib.startswith("lib") else lib)
                            for lib in runtime_libraries
                        }

                create_rc_file(rc_filename, name, filename, libraries, runtime_libraries)

            # Generate import libraries for the simulator-exported symbols
            # and make them visible to every extension's linker invocation.
            def_dir = os.path.join(cocotb_share_dir, "def")
            self._gen_import_libs(def_dir)
            for e in self.extensions:
                e.library_dirs += [def_dir]

        super().build_extensions()

    def build_extension(self, ext):
        """Build each extension in its own temp directory to make gcov happy.

        A normal PEP 517 install still works as the temp directories are
        discarded anyway.
        """
        lib_name = os.path.split(ext.name)[-1]

        if self._uses_msvc():
            ext.extra_compile_args += _extra_cxx_compile_args_msvc
        else:
            ext.extra_compile_args += _extra_cxx_compile_args

            if os.name == "nt":
                # Align behavior of gcc with msvc and export only symbols marked with __declspec(dllexport)
                ext.extra_link_args += ["-Wl,--exclude-all-symbols"]
            else:
                ext.extra_link_args += ["-flto"]

            # rpath setup: the python extension looks for the interface libs
            # in ./libs, the interface libs look in their own directory.
            rpaths = []
            if lib_name == "simulator":
                rpaths += ["$ORIGIN/libs"]
                install_name = None
            else:
                rpaths += ["$ORIGIN"]
                install_name = lib_name

            if sys.platform == "darwin":
                # macOS spells $ORIGIN as @loader_path and needs an
                # install_name so dependents record an @rpath reference.
                rpaths = [rpath.replace("$ORIGIN", "@loader_path") for rpath in rpaths]
                if install_name is not None:
                    ext.extra_link_args += [
                        f"-Wl,-install_name,@rpath/{install_name}.so"
                    ]

            if sys.platform == "linux":
                # Avoid a runtime dependency on libstdc++. Some simulators
                # ship a version of libstdc++6.so which is older than the
                # one cocotb has been compiled with, which will then lead to
                # load-time errors like "libstdc++.so.6: version
                # `GLIBCXX_3.4.29' not found (required by
                # /path/to/libcocotbvhpi_modelsim.so)."
                ext.extra_link_args += ["-static-libstdc++"]

            ext.extra_link_args += [f"-Wl,-rpath,{rpath}" for rpath in rpaths]

        # vpi_user.h and vhpi_user.h require that WIN32 is defined
        if os.name == "nt":
            ext.define_macros += [("WIN32", "")]

        # Per-extension temp dir (restored afterwards) keeps the coverage
        # object files of different extensions apart.
        old_build_temp = self.build_temp
        self.build_temp = os.path.join(self.build_temp, ext.name)
        super().build_extension(ext)
        self.build_temp = old_build_temp

    # Needed for Windows to not assume python module (generate interface in def file)
    def get_export_symbols(self, ext):
        return None

    # For proper cocotb library naming, based on https://github.com/cython/cython/issues/1740
    def get_ext_filename(self, ext_name):
        """
        Like the base class method, but for libraries that are not python extension:
        - removes the ``.cpython-36m-x86_64-linux-gnu.`` or ``-cpython-36m.`` part before the extension
        - replaces ``.pyd`` with ``.dll`` on windows.
        """
        filename = _build_ext.get_ext_filename(self, ext_name)

        # for the simulator python extension library, leaving suffix in place
        if os.path.split(ext_name)[-1] == "simulator":
            return filename

        head, tail = os.path.split(filename)
        tail_split = tail.split(".")

        # mingw on msys2 uses `-` as separator
        tail_split = tail_split[0].split("-")

        # strip lib prefix if msvc is used
        if self._uses_msvc() and tail_split[0].startswith("lib"):
            tail_split[0] = tail_split[0][3:]

        filename_short = os.path.join(head, tail_split[0] + "." + _get_lib_ext_name())

        return filename_short

    def finalize_options(self):
        """Like the base class method, but add extra library_dirs path."""
        super().finalize_options()

        for ext in self.extensions:
            ext.library_dirs.append(os.path.join(self.build_lib, "cocotb", "libs"))

    def copy_extensions_to_source(self):
        """Like the base class method, but copy libs into proper directory in develop."""
        build_py = self.get_finalized_command("build_py")
        for ext in self.extensions:
            fullname = self.get_ext_fullname(ext.name)
            filename = self.get_ext_filename(fullname)
            modpath = fullname.split(".")
            package = ".".join(modpath[:-1])
            package_dir = build_py.get_package_dir(package)

            # unlike the method from `setuptools`, we do not call `os.path.basename` here
            dest_filename = os.path.join(package_dir, filename)
            src_filename = os.path.join(self.build_lib, filename)

            os.makedirs(os.path.dirname(dest_filename), exist_ok=True)

            copy_file(src_filename, dest_filename, verbose=self.verbose)

            if ext._needs_stub:
                self.write_stub(package_dir or os.curdir, ext, True)

    def _gen_import_libs(self, def_dir):
        """
        On Windows generate import libraries that contains the code required
        to load the DLL (.a) based on module definition files (.def)
        """
        for sim in ["icarus", "modelsim", "aldec", "ghdl", "nvcvhpi"]:
            if self._uses_msvc():
                # MSVC: lib.exe /def:<sim>.def /out:<sim>.lib
                subprocess.run(
                    [
                        self.compiler.lib,
                        "/def:" + os.path.join(def_dir, sim + ".def"),
                        "/out:" + os.path.join(def_dir, sim + ".lib"),
                        "/machine:" + ("X64" if sys.maxsize > 2**32 else
"X86"),
                    ],
                    check=True,
                )
            else:
                # MinGW: dlltool -d <sim>.def -l lib<sim>.a
                subprocess.run(
                    [
                        "dlltool",
                        "-d",
                        os.path.join(def_dir, sim + ".def"),
                        "-l",
                        os.path.join(def_dir, "lib" + sim + ".a"),
                    ],
                    check=True,
                )


def _get_python_lib_link():
    """Get name of python library used for linking"""
    if sys.platform == "darwin":
        ld_library = sysconfig.get_config_var("LIBRARY")
    else:
        ld_library = sysconfig.get_config_var("LDLIBRARY")

    if ld_library is not None:
        # Strip the extension and the leading "lib" prefix,
        # e.g. "libpython3.10.so" -> "python3.10".
        python_lib_link = os.path.splitext(ld_library)[0][3:]
    else:
        # Fallback when sysconfig reports no library: derive "pythonXY"
        # from the interpreter version.
        python_version = sysconfig.get_python_version().replace(".", "")
        python_lib_link = "python" + python_version

    return python_lib_link


def _get_python_lib():
    """Get the library for embedding the python interpreter"""
    if os.name == "nt":
        python_lib = _get_python_lib_link() + "." + _get_lib_ext_name()
    elif sys.platform == "darwin":
        python_lib = os.path.join(
            sysconfig.get_config_var("LIBDIR"), "lib" + _get_python_lib_link() + "."
        )
        # Prefer the .dylib if present, otherwise fall back to .so.
        if os.path.exists(python_lib + "dylib"):
            python_lib += "dylib"
        else:
            python_lib += "so"
    else:
        python_lib = "lib" + _get_python_lib_link() + "." + _get_lib_ext_name()

    return python_lib


def _get_common_lib_ext(include_dirs, share_lib_dir):
    """
    Defines common libraries.

    All libraries go into the same directory to enable loading without
    modifying the library path (e.g. LD_LIBRARY_PATH).
    """

    #
    # libgpi
    #
    libgpi_sources = [
        os.path.join(share_lib_dir, "gpi", "GpiCbHdl.cpp"),
        os.path.join(share_lib_dir, "gpi", "GpiCommon.cpp"),
        os.path.join(share_lib_dir, "gpi", "dynload.cpp"),
        os.path.join(share_lib_dir, "gpi", "logging.cpp"),
    ]
    libgpi_libraries = []
    if sys.platform.startswith(("linux", "darwin", "cygwin", "msys")):
        libgpi_libraries.append("dl")  # dlopen, dlerror, dlsym
    if os.name == "nt":
        # Windows resource file embedding the SxS manifest.
        libgpi_sources += ["libgpi.rc"]
    libgpi = Extension(
        os.path.join("cocotb", "libs", "libgpi"),
        define_macros=[
            ("GPI_EXPORTS", ""),
            *_extra_defines,
        ],
        include_dirs=include_dirs,
        libraries=libgpi_libraries,
        sources=libgpi_sources,
    )

    #
    # PyGPI
    #
    pygpi_sources = [
        os.path.join(share_lib_dir, "pygpi", "bind.cpp"),
        os.path.join(share_lib_dir, "pygpi", "embed.cpp"),
        os.path.join(share_lib_dir, "pygpi", "logging.cpp"),
    ]
    if os.name == "nt":
        pygpi_sources += ["simulator.rc"]
    python_lib_dirs = []
    if sys.platform == "darwin":
        # On macOS the python library is not on the default linker path.
        python_lib_dirs = [sysconfig.get_config_var("LIBDIR")]
    libpygpi = Extension(
        os.path.join("cocotb", "simulator"),
        define_macros=[
            ("PYGPI_EXPORTS", ""),
            *_extra_defines,
        ],
        include_dirs=include_dirs,
        libraries=["gpi"],
        library_dirs=python_lib_dirs,
        sources=pygpi_sources,
    )

    # The libraries in this list are compiled in order of their appearance.
    # If there is a linking dependency on one library to another,
    # the linked library must be built first.
    return [libgpi, libpygpi]


def _get_vpi_lib_ext(
    include_dirs, share_lib_dir, sim_define, extra_lib=[], extra_lib_dir=[]
):
    """Build the ``Extension`` for one simulator's VPI interface library.

    NOTE(review): mutable default arguments (``extra_lib=[]``,
    ``extra_lib_dir=[]``) are shared across calls; ``build_extensions``
    later extends ``library_dirs`` in place, so the shared default list can
    accumulate entries across extensions. Consider ``None`` defaults upstream.
    """
    lib_name = "libcocotbvpi_" + sim_define.lower()
    libcocotbvpi_sources = [
        os.path.join(share_lib_dir, "gpi", "vpi", "VpiImpl.cpp"),
        os.path.join(share_lib_dir, "gpi", "vpi", "VpiCbHdl.cpp"),
        os.path.join(share_lib_dir, "gpi", "vpi", "VpiObj.cpp"),
        os.path.join(share_lib_dir, "gpi", "vpi", "VpiIterator.cpp"),
        os.path.join(share_lib_dir, "gpi", "vpi", "VpiSignal.cpp"),
    ]
    if os.name == "nt":
        libcocotbvpi_sources += [lib_name + ".rc"]
    libcocotbvpi = Extension(
        os.path.join("cocotb", "libs", lib_name),
        define_macros=[("COCOTBVPI_EXPORTS", ""), (sim_define, ""), *_extra_defines],
        include_dirs=include_dirs,
        libraries=["gpi", *extra_lib],
        library_dirs=extra_lib_dir,
        sources=libcocotbvpi_sources,
    )

    return libcocotbvpi


def _get_vhpi_lib_ext(
    include_dirs, share_lib_dir, sim_define, extra_lib=[], extra_lib_dir=[]
):
    """Build the ``Extension`` for one simulator's VHPI interface library.

    NOTE(review): same mutable-default-argument caveat as ``_get_vpi_lib_ext``.
    """
    lib_name = "libcocotbvhpi_" + sim_define.lower()
    libcocotbvhpi_sources = [
        os.path.join(share_lib_dir, "gpi", "vhpi", "VhpiImpl.cpp"),
        os.path.join(share_lib_dir, "gpi", "vhpi", "VhpiCbHdl.cpp"),
        os.path.join(share_lib_dir, "gpi", "vhpi", "VhpiObj.cpp"),
        os.path.join(share_lib_dir, "gpi", "vhpi", "VhpiIterator.cpp"),
        os.path.join(share_lib_dir, "gpi", "vhpi", "VhpiSignal.cpp"),
    ]
    if os.name == "nt":
        libcocotbvhpi_sources += [lib_name + ".rc"]
    libcocotbvhpi = Extension(
        os.path.join("cocotb", "libs", lib_name),
        include_dirs=include_dirs,
        define_macros=[("COCOTBVHPI_EXPORTS", ""), (sim_define, ""), *_extra_defines],
        libraries=["gpi", *extra_lib],
        library_dirs=extra_lib_dir,
        sources=libcocotbvhpi_sources,
    )

    return libcocotbvhpi


def get_ext():
    """Assemble the list of all ``Extension`` objects to build.

    Interface libraries for every supported simulator are always compiled;
    simulators only usable on POSIX are skipped on Windows, and Windows
    builds link against import libraries named after each simulator.
    """
    cfg_vars = distutils.sysconfig.get_config_vars()

    if sys.platform == "darwin":
        cfg_vars["LDSHARED"] = cfg_vars["LDSHARED"].replace("-bundle", "-dynamiclib")
        # NOTE(review): this reads the already-patched LDSHARED rather than
        # LDCXXSHARED — possibly an intentional copy, possibly a typo. Verify.
        cfg_vars["LDCXXSHARED"] = cfg_vars["LDSHARED"].replace("-bundle", "-dynamiclib")

    share_lib_dir = os.path.relpath(os.path.join(cocotb_share_dir, "lib"))
    include_dirs = [
        os.path.relpath(os.path.join(cocotb_share_dir, "include")),
        os.path.relpath(os.path.join(os.path.dirname(__file__), "src", "cocotb")),
    ]

    ext = []

    logger.info("Compiling interface libraries for cocotb ...")

    ext += _get_common_lib_ext(include_dirs, share_lib_dir)

    #
    # Icarus Verilog
    #
    icarus_extra_lib = []
    logger.info("Compiling libraries for Icarus Verilog")
    if os.name == "nt":
        icarus_extra_lib = ["icarus"]
    icarus_vpi_ext = _get_vpi_lib_ext(
        include_dirs=include_dirs,
        share_lib_dir=share_lib_dir,
        sim_define="ICARUS",
        extra_lib=icarus_extra_lib,
    )
    ext.append(icarus_vpi_ext)

    #
    # Modelsim/Questa
    #
    modelsim_extra_lib = []
    logger.info("Compiling libraries for Modelsim/Questa")
    if os.name == "nt":
        modelsim_extra_lib = ["modelsim"]
    modelsim_vpi_ext = _get_vpi_lib_ext(
        include_dirs=include_dirs,
        share_lib_dir=share_lib_dir,
        sim_define="MODELSIM",
        extra_lib=modelsim_extra_lib,
    )
    ext.append(modelsim_vpi_ext)

    modelsim_vhpi_ext = _get_vhpi_lib_ext(
        include_dirs=include_dirs,
        share_lib_dir=share_lib_dir,
        sim_define="MODELSIM",
        extra_lib=modelsim_extra_lib,
    )
    ext.append(modelsim_vhpi_ext)

    # FLI (Modelsim/Questa proprietary interface) is built inline here
    # rather than via the _get_*_lib_ext helpers.
    lib_name = "libcocotbfli_modelsim"
    fli_sources = [
        os.path.join(share_lib_dir, "gpi", "fli", "FliImpl.cpp"),
        os.path.join(share_lib_dir, "gpi", "fli", "FliCbHdl.cpp"),
        os.path.join(share_lib_dir, "gpi", "fli", "FliObjHdl.cpp"),
    ]
    if os.name == "nt":
        fli_sources += [lib_name + ".rc"]
    fli_ext = Extension(
        os.path.join("cocotb", "libs", lib_name),
        define_macros=[("COCOTBFLI_EXPORTS", ""), *_extra_defines],
        include_dirs=include_dirs,
        libraries=["gpi", *modelsim_extra_lib],
        sources=fli_sources,
    )
    ext.append(fli_ext)

    #
    # GHDL
    #
    ghdl_extra_lib = []
    logger.info("Compiling libraries for GHDL")
    if os.name == "nt":
        ghdl_extra_lib = ["ghdl"]
    ghdl_vpi_ext = _get_vpi_lib_ext(
        include_dirs=include_dirs,
        share_lib_dir=share_lib_dir,
        sim_define="GHDL",
        extra_lib=ghdl_extra_lib,
    )
    ext.append(ghdl_vpi_ext)

    #
    # IUS
    #
    if os.name == "posix":
        logger.info("Compiling libraries for Incisive/Xcelium")
        ius_vpi_ext = _get_vpi_lib_ext(
            include_dirs=include_dirs, share_lib_dir=share_lib_dir, sim_define="IUS"
        )
        ext.append(ius_vpi_ext)

        ius_vhpi_ext = _get_vhpi_lib_ext(
            include_dirs=include_dirs, share_lib_dir=share_lib_dir, sim_define="IUS"
        )
        ext.append(ius_vhpi_ext)

    #
    # VCS
    #
    if os.name == "posix":
        logger.info("Compiling libraries for VCS")
        vcs_vpi_ext = _get_vpi_lib_ext(
            include_dirs=include_dirs, share_lib_dir=share_lib_dir, sim_define="VCS"
        )
        ext.append(vcs_vpi_ext)

    #
    # Aldec Riviera Pro
    #
    aldec_extra_lib = []
    logger.info("Compiling libraries for Riviera")
    if os.name == "nt":
        aldec_extra_lib = ["aldec"]
    aldec_vpi_ext = _get_vpi_lib_ext(
        include_dirs=include_dirs,
        share_lib_dir=share_lib_dir,
        sim_define="ALDEC",
        extra_lib=aldec_extra_lib,
    )
    ext.append(aldec_vpi_ext)

    aldec_vhpi_ext = _get_vhpi_lib_ext(
        include_dirs=include_dirs,
        share_lib_dir=share_lib_dir,
        sim_define="ALDEC",
        extra_lib=aldec_extra_lib,
    )
    ext.append(aldec_vhpi_ext)

    #
    # Verilator
    #
    if os.name == "posix":
        logger.info("Compiling libraries for Verilator")
        verilator_vpi_ext = _get_vpi_lib_ext(
            include_dirs=include_dirs,
            share_lib_dir=share_lib_dir,
            sim_define="VERILATOR",
        )
        ext.append(verilator_vpi_ext)

    #
    # NVC
    #
    nvc_extra_lib = []
    if os.name == "nt":
        nvc_extra_lib = ["nvcvhpi"]
    logger.info("Compiling libraries for NVC")
    nvc_vhpi_ext = _get_vhpi_lib_ext(
        include_dirs=include_dirs,
        share_lib_dir=share_lib_dir,
        sim_define="NVC",
        extra_lib=nvc_extra_lib,
    )
    ext.append(nvc_vhpi_ext)

    #
    # DSim
    #
    if os.name == "posix":
        logger.info("Compiling libraries for DSim")
        dsim_vpi_ext = _get_vpi_lib_ext(
            include_dirs=include_dirs, share_lib_dir=share_lib_dir, sim_define="DSim"
        )
        ext.append(dsim_vpi_ext)

    return ext


================================================
FILE: docs/.gitignore
================================================
.venv
source/master-notes.rst
source/doxygen


================================================
FILE: docs/Doxyfile
================================================
# Doxyfile 1.9.8

# This file
describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). # # Note: # # Use doxygen to compare the used configuration file with the template # configuration file: # doxygen -x [configFile] # Use doxygen to compare the used configuration file with the template # configuration file without replacing the environment variables or CMake type # replacement variables: # doxygen -x_noenv [configFile] #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the configuration # file that follow. The default is UTF-8 which is also the encoding used for all # text before the first occurrence of this tag. Doxygen uses libiconv (or the # iconv built into libc) for the transcoding. See # https://www.gnu.org/software/libiconv/ for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = cocotb # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. 
PROJECT_NUMBER = # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy # the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = source/doxygen # If the CREATE_SUBDIRS tag is set to YES then doxygen will create up to 4096 # sub-directories (in 2 levels) under the output directory of each output format # and will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. Adapt CREATE_SUBDIRS_LEVEL to # control the number of sub-directories. # The default value is: NO. CREATE_SUBDIRS = NO # Controls the number of sub-directories that will be created when # CREATE_SUBDIRS tag is set to YES. Level 0 represents 16 directories, and every # level increment doubles the number of directories, resulting in 4096 # directories at level 8 which is the default and also the maximum value. The # sub-directories are organized in 2 levels, the first level always has a fixed # number of 16 directories. # Minimum value: 0, maximum value: 8, default value: 8. # This tag requires that the tag CREATE_SUBDIRS is set to YES. 
CREATE_SUBDIRS_LEVEL = 8 # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. # Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Bulgarian, # Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, Dutch, English # (United States), Esperanto, Farsi (Persian), Finnish, French, German, Greek, # Hindi, Hungarian, Indonesian, Italian, Japanese, Japanese-en (Japanese with # English messages), Korean, Korean-en (Korean with English messages), Latvian, # Lithuanian, Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, # Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, # Swedish, Turkish, Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. 
Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. ABBREVIATE_BRIEF = # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = NO # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. 
STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful if your file system doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = YES # If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line # such as # /*************** # as being the beginning of a Javadoc-style comment "banner". If set to NO, the # Javadoc-style will behave just like regular comments and it will not be # interpreted by doxygen. # The default value is: NO. JAVADOC_BANNER = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. 
The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # By default Python docstrings are displayed as preformatted text and doxygen's # special commands cannot be used. By setting PYTHON_DOCSTRING to NO the # doxygen's special commands can be used and the contents of the docstring # documentation blocks is shown as doxygen documentation. # The default value is: YES. PYTHON_DOCSTRING = YES # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. TAB_SIZE = 4 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:^^" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". Note that you cannot put \n's in the value part of an alias # to insert newlines (in the resulting output). You can put ^^ in the value part # of an alias to insert a newline as if a physical newline was in the original # file. 
When you need a literal { or } or , in the value part of an alias you # have to escape them by means of a backslash (\), this can lead to conflicts # with the commands \{ and \} for these it is advised to use the version @{ and # @} or use a double escape (\\{ and \\}) ALIASES = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. OPTIMIZE_OUTPUT_VHDL = NO # Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice # sources only. Doxygen will then generate output that is more tailored for that # language. For instance, namespaces will be presented as modules, types will be # separated into more groups, etc. # The default value is: NO. OPTIMIZE_OUTPUT_SLICE = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. 
The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, JavaScript, # Csharp (C#), C, C++, Lex, D, PHP, md (Markdown), Objective-C, Python, Slice, # VHDL, Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: # FortranFree, unknown formatted Fortran: Fortran. In the latter case the parser # tries to guess whether the code is fixed or free formatted code, this is the # default for Fortran type files). For instance to make doxygen treat .inc files # as Fortran files (default is PHP), and .f files as C (default is Fortran), # use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. When specifying no_extension you should add # * to the FILE_PATTERNS. # # Note see also the list of default file extension mappings. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibility issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. # Minimum value: 0, maximum value: 99, default value: 5. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. 
TOC_INCLUDE_HEADINGS = 5 # The MARKDOWN_ID_STYLE tag can be used to specify the algorithm used to # generate identifiers for the Markdown headings. Note: Every identifier is # unique. # Possible values are: DOXYGEN use a fixed 'autotoc_md' string followed by a # sequence number starting at 0 and GITHUB use the lower case version of title # with any whitespace replaced by '-' and punctuation characters removed. # The default value is: DOXYGEN. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. MARKDOWN_ID_STYLE = DOXYGEN # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. 
Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. # The default value is: NO. GROUP_NESTED_COMPOUNDS = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. 
file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = YES # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. LOOKUP_CACHE_SIZE = 0 # The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use # during processing. When set to 0 doxygen will based this on the number of # cores available in the system. You can set it explicitly to a value larger # than 0 to get more control over the balance between CPU load and processing # speed. 
At this moment only the input processing can be done using multiple # threads. Since this is still an experimental feature the default is set to 1, # which effectively disables parallel processing. Please report any issues you # encounter. Generating dot graphs in parallel is controlled by the # DOT_NUM_THREADS setting. # Minimum value: 0, maximum value: 32, default value: 1. NUM_PROC_THREADS = 1 # If the TIMESTAMP tag is set different from NO then each generated page will # contain the date or date and time when the page was generated. Setting this to # NO can help when comparing the output of multiple runs. # Possible values are: YES, NO, DATETIME and DATE. # The default value is: NO. TIMESTAMP = NO #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = NO # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO # If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual # methods of a class will be included in the documentation. # The default value is: NO. EXTRACT_PRIV_VIRTUAL = NO # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. 
EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO, only methods in the interface are # included. # The default value is: NO. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If this flag is set to YES, the name of an unnamed parameter in a declaration # will be determined by the corresponding definition. By default unnamed # parameters remain unnamed in the output. # The default value is: YES. RESOLVE_UNNAMED_PARAMS = YES # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. 
This option # will also hide undocumented C++ concepts if enabled. This option has no effect # if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # declarations. If set to NO, these declarations will be included in the # documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = NO # With the correct setting of option CASE_SENSE_NAMES doxygen will better be # able to match the capabilities of the underlying filesystem. In case the # filesystem is case sensitive (i.e. it supports files in the same directory # whose names only differ in casing), the option must be set to YES to properly # deal with such files in case they appear in the input. For filesystems that # are not case sensitive the option should be set to NO to properly deal with # output files written for symbols that only differ in casing, such as for two # classes, one named CLASS and the other named Class, and to also support # references to files without having to specify the exact matching casing. On # Windows (including Cygwin) and MacOS, users should typically set this option # to NO, whereas on Linux or other Unix flavors it should typically be set to # YES. # Possible values are: SYSTEM, NO and YES. # The default value is: SYSTEM. 
CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. If set to # YES the compound reference will be hidden. # The default value is: NO. HIDE_COMPOUND_REFERENCE= NO # If the SHOW_HEADERFILE tag is set to YES then the documentation for a class # will show which file needs to be included to use the class. # The default value is: YES. SHOW_HEADERFILE = YES # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = NO # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. # The default value is: YES. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. 
If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. 
STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. 
SHOW_USED_FILES = NO # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = NO # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. See also section "Changing the # layout of pages" for information. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. 
See also https://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = NO # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as documenting some parameters in # a documented function twice, or documenting parameters that don't exist or # using markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # If WARN_IF_INCOMPLETE_DOC is set to YES, doxygen will warn about incomplete # function parameter documentation. If set to NO, doxygen will accept that some # parameters have no documentation without warning. # The default value is: YES. 
WARN_IF_INCOMPLETE_DOC = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong parameter # documentation, but not about the absence of documentation. If EXTRACT_ALL is # set to YES then this flag will automatically be disabled. See also # WARN_IF_INCOMPLETE_DOC # The default value is: NO. WARN_NO_PARAMDOC = NO # If WARN_IF_UNDOC_ENUM_VAL option is set to YES, doxygen will warn about # undocumented enumeration values. If set to NO, doxygen will accept # undocumented enumeration values. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: NO. WARN_IF_UNDOC_ENUM_VAL = NO # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when # a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS # then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but # at the end of the doxygen process doxygen will return with a non-zero status. # If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS_PRINT then doxygen behaves # like FAIL_ON_WARNINGS but in case no WARN_LOGFILE is defined doxygen will not # write the warning messages in between other messages but write them at the end # of a run, in case a WARN_LOGFILE is defined the warning messages will be # besides being in the defined file also be shown at the end of a run, unless # the WARN_LOGFILE is defined as - i.e. standard output (stdout) in that case # the behavior will remain as with the setting FAIL_ON_WARNINGS. # Possible values are: NO, YES, FAIL_ON_WARNINGS and FAIL_ON_WARNINGS_PRINT. # The default value is: NO. WARN_AS_ERROR = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. 
The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # See also: WARN_LINE_FORMAT # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text" # In the $text part of the WARN_FORMAT command it is possible that a reference # to a more specific place is given. To make it easier to jump to this place # (outside of doxygen) the user can define a custom "cut" / "paste" string. # Example: # WARN_LINE_FORMAT = "'vi $file +$line'" # See also: WARN_FORMAT # The default value is: at line $line of file $file. WARN_LINE_FORMAT = "at line $line of file $file" # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). In case the file specified cannot be opened for writing the # warning and error messages are written to standard error. When as file - is # specified the warning and error messages are written to standard output # (stdout). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = ../src/cocotb/share/include/gpi.h # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. 
Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: # https://www.gnu.org/software/libiconv/) for the list of possible encodings. # See also: INPUT_FILE_ENCODING # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # This tag can be used to specify the character encoding of the source files # that doxygen parses. The INPUT_FILE_ENCODING tag can be used to specify # character encoding on a per file pattern basis. Doxygen will compare the file # name with each pattern and apply the encoding instead of the default # INPUT_ENCODING if there is a match. The character encodings are a list of the # form: pattern=encoding (like *.php=ISO-8859-1). See the "INPUT_ENCODING" tag # for further information on supported encodings. INPUT_FILE_ENCODING = # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # read by doxygen. # # Note the list of default checked file patterns might differ from the list of # default file extension mappings. # # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cxxm, # *.cpp, *.cppm, *.c++, *.c++m, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, # *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, *.h++, *.ixx, *.l, *.cs, *.d, *.php, # *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to be # provided as doxygen C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, # *.f18, *.f, *.for, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. 
RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # ANamespace::AClass, ANamespace::*Test EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. 
EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # <filter> <input-file> # # where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. # # Note that doxygen will use the data processed and written to standard output # for further processing, therefore nothing else, like debug statements or used # commands (so in case of a Windows batch file always use @echo OFF), should be # written to standard output. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. 
FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. USE_MDFILE_AS_MAINPAGE = # The Fortran standard specifies that for fixed formatted Fortran code all # characters from position 72 are to be considered as comment. A common # extension is to allow longer lines before the automatic comment starts. The # setting FORTRAN_COMMENT_AFTER will also make it possible that longer lines can # be processed before the automatic comment starts. # Minimum value: 7, maximum value: 10000, default value: 72. FORTRAN_COMMENT_AFTER = 72 #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. 
SOURCE_BROWSER = NO # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # entity all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = NO # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see https://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. 
# # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = YES # If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the # clang parser (see: # http://clang.llvm.org/) for more accurate parsing at the cost of reduced # performance. This can be particularly helpful with template rich C++ code for # which doxygen's built-in parser lacks the necessary type information. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. # The default value is: NO. CLANG_ASSISTED_PARSING = NO # If the CLANG_ASSISTED_PARSING tag is set to YES and the CLANG_ADD_INC_PATHS # tag is set to YES then doxygen will add the directory of each input to the # include path. # The default value is: YES. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_ADD_INC_PATHS = YES # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. 
# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_OPTIONS = # If clang assisted parsing is enabled you can provide the clang parser with the # path to the directory containing a file called compile_commands.json. This # file is the compilation database (see: # http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the # options used when the source files were built. This is equivalent to # specifying the -p option to a clang tool, such as clang-check. These options # will then be passed to the parser. Any options specified with CLANG_OPTIONS # will be added as well. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. CLANG_DATABASE_PATH = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = YES # The IGNORE_PREFIX tag can be used to specify a prefix (or a list of prefixes) # that should be ignored while generating the index headers. The IGNORE_PREFIX # tag works for classes, function and member names. The entity will be placed in # the alphabetical list under the first letter of the entity name that remains # after removing the prefix. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. 
GENERATE_HTML = NO # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. 
# This tag requires that the tag GENERATE_HTML is set to YES. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. # Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra style sheet files is of importance (e.g. the last # style sheet in the list overrules the setting of the previous ones in the # list). # Note: Since the styling of scrollbars can currently not be overruled in # Webkit/Chromium, the styling will be left out of the default doxygen.css if # one or more extra stylesheets have been specified. So if scrollbar # customization is desired it has to be added explicitly. For an example see the # documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. 
Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE tag can be used to specify if the generated HTML output # should be rendered with a dark or light theme. # Possible values are: LIGHT always generate light mode output, DARK always # generate dark mode output, AUTO_LIGHT automatically set the mode according to # the user preference, use light mode if no preference is set (the default), # AUTO_DARK automatically set the mode according to the user preference, use # dark mode if no preference is set and TOGGLE allows the user to switch between # light and dark mode via a button. # The default value is: AUTO_LIGHT. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE = AUTO_LIGHT # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a color-wheel, see # https://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use gray-scales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that # are dynamically created via JavaScript. If disabled, the navigation index will # consists of multiple levels of tabs that are statically embedded in every HTML # page. Disable this option to support browsers that do not have JavaScript, # like the Qt help browser. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_MENUS = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_SECTIONS = NO # If the HTML_CODE_FOLDING tag is set to YES then classes and functions can be # dynamically folded and expanded in the generated HTML source code. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_CODE_FOLDING = YES # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. 
Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 0 is a special value # representing an infinite number of entries and will result in a full expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: # https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To # create a documentation set, doxygen will generate a Makefile in the HTML # output directory. Running make will produce the docset in that directory and # running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy # genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_DOCSET = NO # This tag determines the name of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen generated docs" # This tag determines the URL of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # This tag requires that the tag GENERATE_DOCSET is set to YES. 
DOCSET_FEEDURL = # This tag specifies a string that should uniquely identify the documentation # set bundle. This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_BUNDLE_ID = org.doxygen.Project # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # on Windows. In the beginning of 2021 Microsoft took the original page, with # a.o. the download links, offline (the HTML help workshop was already many years # in maintenance mode). You can download the HTML help workshop from the web # archives at Installation executable (see: # http://web.archive.org/web/20160201063255/http://download.microsoft.com/downlo # ad/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe). # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. 
Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated # (YES) or that it should be included in the main .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated # (YES) or a normal table of contents (NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. 
TOC_EXPAND = NO # The SITEMAP_URL tag is used to specify the full URL of the place where the # generated documentation will be placed on the server by the user during the # deployment of the documentation. The generated sitemap is called sitemap.xml # and placed on the directory specified by HTML_OUTPUT. In case no SITEMAP_URL # is specified no sitemap is generated. For information about the sitemap # protocol see https://www.sitemaps.org # This tag requires that the tag GENERATE_HTML is set to YES. SITEMAP_URL = # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace # (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. 
For more information please see Qt Help Project / Custom # Filters (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location (absolute path # including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to # run qhelpgenerator on the generated .qhp file. # This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. 
Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine tune the look of the index (see "Fine-tuning the output"). As an # example, the default style sheet generated by doxygen has an example that # shows how to put an image at the root of the tree instead of the PROJECT_NAME. # Since the tree basically has the same information as the tab index, you could # consider setting DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. 
GENERATE_TREEVIEW = NO # When both GENERATE_TREEVIEW and DISABLE_INDEX are set to YES, then the # FULL_SIDEBAR option determines if the side bar is limited to only the treeview # area (value NO) or if it should extend to the full height of the window (value # YES). Setting this to YES gives a layout similar to # https://docs.readthedocs.io with more room for contents, but less room for the # project logo, title, and description. If either GENERATE_TREEVIEW or # DISABLE_INDEX is set to NO, this option has no effect. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. FULL_SIDEBAR = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. TREEVIEW_WIDTH = 250 # If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = NO # If the OBFUSCATE_EMAILS tag is set to YES, doxygen will obfuscate email # addresses. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. 
OBFUSCATE_EMAILS = YES # If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg # tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see # https://inkscape.org) to generate formulas as SVG images instead of PNGs for # the HTML output. These images will generally look nicer at scaled resolutions. # Possible values are: png (the default) and svg (looks nicer but requires the # pdf2svg or inkscape tool). # The default value is: png. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FORMULA_FORMAT = png # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. # Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_FONTSIZE = 10 # The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands # to create new LaTeX commands to be used in formulas as building blocks. See # the section "Including formulas" for details. FORMULA_MACROFILE = # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # https://www.mathjax.org) which uses client side JavaScript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want the formulas to look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. USE_MATHJAX = NO # With MATHJAX_VERSION it is possible to specify the MathJax version to be used.
# Note that the different versions of MathJax have different requirements with # regards to the different settings, so it is possible that also other MathJax # settings have to be changed when switching between the different MathJax # versions. # Possible values are: MathJax_2 and MathJax_3. # The default value is: MathJax_2. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_VERSION = MathJax_2 # When MathJax is enabled you can set the default output format to be used for # the MathJax output. For more details about the output format see MathJax # version 2 (see: # http://docs.mathjax.org/en/v2.7-latest/output.html) and MathJax version 3 # (see: # http://docs.mathjax.org/en/latest/web/components/output.html). # Possible values are: HTML-CSS (which is slower, but has the best # compatibility. This is the name for Mathjax version 2, for MathJax version 3 # this will be translated into chtml), NativeMML (i.e. MathML. Only supported # for MathJax 2. For MathJax version 3 chtml will be used instead.), chtml (This # is the name for Mathjax version 3, for MathJax version 2 this will be # translated into HTML-CSS) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from https://www.mathjax.org before deployment.
The default value is: # - in case of MathJax version 2: https://cdn.jsdelivr.net/npm/mathjax@2 # - in case of MathJax version 3: https://cdn.jsdelivr.net/npm/mathjax@3 # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2 # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example # for MathJax version 2 (see # https://docs.mathjax.org/en/v2.7-latest/tex.html#tex-and-latex-extensions): # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # For example for MathJax version 3 (see # http://docs.mathjax.org/en/latest/input/tex/extensions/index.html): # MATHJAX_EXTENSIONS = ams # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: # http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use <access key> + S # (what the <access key> is depends on the OS and browser, but it is typically # <CTRL>, <ALT>/